
Recherche avancée
Médias (1)
-
Somos millones 1
21 juillet 2014, par
Mis à jour : Juin 2015
Langue : français
Type : Video
Autres articles (48)
-
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par
Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière l’ajout d’une image de fond ;
-
Ecrire une actualité
21 juin 2013, par
Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
Vous pouvez personnaliser le formulaire de création d’une actualité.
Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...) -
Publier sur MédiaSpip
13 juin 2013
Puis-je poster des contenus à partir d’une tablette Ipad ?
Oui, si votre Médiaspip installé est à la version 0.2 ou supérieure. Contacter au besoin l’administrateur de votre MédiaSpip pour le savoir
Sur d’autres sites (7117)
-
FFMpeg CUDA yuvj420p frame conversion to cv::Mat layers shifted
26 février 2023, par AcidTonic
I am trying to retrieve hardware decoded H264 frames from the cuda backend of ffmpeg and display them as a cv::Mat. I got decently far and was able to get color images but it seems the conversion is not quite right as the image I get has a green bar at the top and if you look closely the blue parts of the image are offset down and to the right a little bit making everything look a little wonky.


Correct Image as shown by ffplay using the same driver


Image I am getting



Here is the full source code in the hopes someone can help me to get the correct image here...


#include 

#include 

#include 

#include 

#include 

#include 

#include <iostream>

#include <fstream>

#include <cstdlib>

#include <chrono>

#include <cstring>

extern "C" {

 //Linker errors if not inside extern. FFMPEG headers are not C++ aware
 #include <libavcodec/avcodec.h>

 #include <libavformat/avformat.h>

 #include <libavutil/pixdesc.h>

 #include <libavutil/hwcontext.h>

 #include <libavutil/opt.h>

 #include <libavutil/avassert.h>

 #include <libavutil/imgutils.h>

}

#include <iomanip>

#include <string>

#include <sstream>

#include <opencv2/opencv.hpp>

#ifdef __cplusplus
extern "C" {
 #endif // __cplusplus
 #include <libavdevice/avdevice.h>

 #include <libavfilter/avfilter.h>

 #include <libavformat/avio.h>

 #include <libavutil/avutil.h>

 #include <libpostproc/postprocess.h>

 #include <libswresample/swresample.h>

 #include <libswscale/swscale.h>

 #ifdef __cplusplus
} // end extern "C".
#endif // __cplusplus

// Shared state used across the decode helpers below.
static AVBufferRef * hw_device_ctx = NULL;   // hardware (CUDA) device context created in hw_decoder_init()
static enum AVPixelFormat hw_pix_fmt;        // HW surface format chosen from the decoder's hw configs; read by get_hw_format()/decode_write()
static FILE * output_file_fd = NULL;         // optional raw-dump file; only closed at the end of the test, never opened here
cv::Mat output_mat;                          // most recently converted BGR/RGB frame shown via cv::imshow
int bgr_size;                                // NOTE(review): never assigned or read in this file — presumably leftover

// Create the requested hardware device context and hand the codec context a
// reference to it, so the decoder allocates its frames on that device.
// Returns 0 on success or the negative AVERROR from device creation.
static int hw_decoder_init(AVCodecContext * ctx,
  const enum AVHWDeviceType type) {
  const int err = av_hwdevice_ctx_create(&hw_device_ctx, type, NULL, NULL, 0);
  if (err < 0) {
    fprintf(stderr, "Failed to create specified HW device.\n");
    return err;
  }

  // The codec context takes its own reference; the global keeps ours.
  ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
  return err;
}

// get_format callback installed on the decoder context: scan the formats the
// decoder offers and pick the negotiated hardware surface format.
//
// Fix vs. original: the terminator of pix_fmts was compared against the magic
// literal -1; use the AV_PIX_FMT_NONE enum constant it actually stands for.
static enum AVPixelFormat get_hw_format(AVCodecContext * ctx,
  const enum AVPixelFormat * pix_fmts) {
  // pix_fmts is an AV_PIX_FMT_NONE-terminated list.
  for (const enum AVPixelFormat * p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
    if (*p == hw_pix_fmt)
      return *p;
  }

  fprintf(stderr, "Failed to get HW surface format.\n");
  return AV_PIX_FMT_NONE;  // signals negotiation failure to libavcodec
}

/**
 * Send `packet` to the decoder, then receive and display every frame it
 * produces. Hardware frames are downloaded to system memory first.
 * Returns 0 when the decoder has no more output for this packet, or a
 * negative AVERROR on failure.
 *
 * Fixes vs. the original (the cause of the green bar / shifted chroma):
 *  - av_hwframe_transfer_data() from the CUDA device yields an NV12 frame
 *    (interleaved UV plane), not NV21/YUVJ420P, so the conversion code must
 *    be COLOR_YUV2BGR_NV12 (NV21 swaps/offsets the chroma samples);
 *  - each plane's rows are padded to linesize[] bytes; wrapping data[0]
 *    directly in a (h*3/2 x w) Mat shears the image, so planes are copied
 *    row-by-row into a tightly packed buffer;
 *  - the while(1) loop ended in an unconditional `return ret`, so only one
 *    frame per packet was ever consumed and the decoder was never drained;
 *  - `tmp_frame` (not always `sw_frame`) is the frame actually converted;
 *  - removed the never-allocated `buffer`/`size` locals.
 */
static int decode_write(AVCodecContext * avctx, AVPacket * packet) {
  int ret = avcodec_send_packet(avctx, packet);
  if (ret < 0) {
    fprintf(stderr, "Error during decoding\n");
    return ret;
  }

  while (1) {
    AVFrame * frame = av_frame_alloc();
    AVFrame * sw_frame = av_frame_alloc();
    if (!frame || !sw_frame) {
      fprintf(stderr, "Can not alloc frame\n");
      av_frame_free(&frame);
      av_frame_free(&sw_frame);
      return AVERROR(ENOMEM);
    }

    ret = avcodec_receive_frame(avctx, frame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
      // Decoder drained for this packet (or end of stream): success.
      av_frame_free(&frame);
      av_frame_free(&sw_frame);
      return 0;
    } else if (ret < 0) {
      fprintf(stderr, "Error while decoding\n");
      av_frame_free(&frame);
      av_frame_free(&sw_frame);
      return ret;
    }

    AVFrame * tmp_frame = NULL;
    if (frame->format == hw_pix_fmt) {
      /* retrieve data from GPU to CPU (CUDA produces NV12 here) */
      if ((ret = av_hwframe_transfer_data(sw_frame, frame, 0)) < 0) {
        fprintf(stderr, "Error transferring the data to system memory\n");
        av_frame_free(&frame);
        av_frame_free(&sw_frame);
        return ret;
      }
      tmp_frame = sw_frame;
    } else {
      tmp_frame = frame;
    }

    const int w = tmp_frame->width;
    const int h = tmp_frame->height;

    // Repack the (possibly padded) planes into a contiguous h*3/2 x w
    // single-channel Mat that OpenCV's YUV converters expect.
    cv::Mat yuv(h + h / 2, w, CV_8UC1);
    for (int y = 0; y < h; y++)  // luma plane
      std::memcpy(yuv.ptr(y), tmp_frame->data[0] + y * tmp_frame->linesize[0], w);

    int conversion;
    if (tmp_frame->format == AV_PIX_FMT_NV12) {
      // Interleaved UV plane, w bytes per chroma row.
      for (int y = 0; y < h / 2; y++)
        std::memcpy(yuv.ptr(h + y), tmp_frame->data[1] + y * tmp_frame->linesize[1], w);
      conversion = cv::COLOR_YUV2BGR_NV12;
    } else {
      // Planar 4:2:0 (yuv420p / yuvj420p): U plane then V plane, w/2 bytes
      // per row each. `yuv` is freshly constructed, hence continuous.
      uint8_t * dst = yuv.ptr(h);
      for (int y = 0; y < h / 2; y++, dst += w / 2)
        std::memcpy(dst, tmp_frame->data[1] + y * tmp_frame->linesize[1], w / 2);
      for (int y = 0; y < h / 2; y++, dst += w / 2)
        std::memcpy(dst, tmp_frame->data[2] + y * tmp_frame->linesize[2], w / 2);
      conversion = cv::COLOR_YUV2BGR_I420;
    }

    cv::Mat out_mat;
    cv::cvtColor(yuv, out_mat, conversion);

    output_mat = out_mat;

    if (!output_mat.empty()) {
      cv::imshow("image", output_mat);
      cv::waitKey(1);
    }

    av_frame_free(&frame);
    av_frame_free(&sw_frame);
  }
}

/**
 * Integration test: open an H264 RTSP stream, negotiate the "cuda" hwaccel,
 * and feed every video packet to decode_write() for GPU decode + display.
 *
 * Fixes vs. the original:
 *  - the stream-inspection loop allocated an AVCodecContext per stream with
 *    avcodec_alloc_context3() and never freed it (leak per stream);
 *  - video_stream is an int but was printed with "%u";
 *  - the inspection loop's `int i` shadowed the outer `i` (renamed, and made
 *    unsigned to match nb_streams).
 */
TEST_CASE("CUDAH264", "Tests hardware h264 decoding") {

  AVFormatContext * input_ctx = NULL;
  int video_stream, ret;
  AVStream * video = NULL;
  AVCodecContext * decoder_ctx = NULL;
  AVCodec * decoder = NULL;
  AVPacket * packet = NULL;
  enum AVHWDeviceType type;
  int i;

  std::string device_type = "cuda";
  std::string input_file = "rtsp://10.100.2.152"; //My H264 network stream here...

  /* The stream data is below...
  Input #0, rtsp, from 'rtsp://10.100.2.152':
  Metadata:
  title : VCP IPC Realtime stream
  Duration: N/A, start: 0.000000, bitrate: N/A
  Stream #0:0: Video: h264 (High), yuvj420p(pc, bt709, progressive), 1920x1080, 10 fps, 10 tbr, 90k tbn, 20 tbc
  */

  // Resolve the hwaccel backend by name; on failure list what is available.
  type = av_hwdevice_find_type_by_name(device_type.c_str());
  if (type == AV_HWDEVICE_TYPE_NONE) {
    fprintf(stderr, "Device type %s is not supported.\n", device_type.c_str());
    fprintf(stderr, "Available device types:");
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
      fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
    fprintf(stderr, "\n");
    throw std::runtime_error("Error");
  }

  packet = av_packet_alloc();
  if (!packet) {
    fprintf(stderr, "Failed to allocate AVPacket\n");
    throw std::runtime_error("Error");
  }

  /* open the input file */
  if (avformat_open_input(&input_ctx, input_file.c_str(), NULL, NULL) != 0) {
    fprintf(stderr, "Cannot open input file '%s'\n", input_file.c_str());
    throw std::runtime_error("Error");
  }

  if (avformat_find_stream_info(input_ctx, NULL) < 0) {
    fprintf(stderr, "Cannot find input stream information.\n");
    throw std::runtime_error("Error");
  }

  av_dump_format(input_ctx, 0, input_file.c_str(), 0);

  // Diagnostic dump of every stream in the input. The temporary codec
  // context exists only to mirror the stream parameters; free it each pass.
  for (unsigned int s = 0; s < input_ctx->nb_streams; s++) {
    auto pCodec = avcodec_find_decoder(input_ctx->streams[s]->codecpar->codec_id);
    auto pCodecCtx = avcodec_alloc_context3(pCodec);
    avcodec_parameters_to_context(pCodecCtx, input_ctx->streams[s]->codecpar);

    printf("Found Video stream with ID: %d\n", input_ctx->streams[s]->id);
    printf("\t Stream Index: %d\n", input_ctx->streams[s]->index);

    AVCodecParameters * codecpar = input_ctx->streams[s]->codecpar;
    printf("\t Codec Type: %s\n", av_get_media_type_string(codecpar->codec_type));
    printf("\t Side data count: %d\n", input_ctx->streams[s]->nb_side_data);
    printf("\t Pixel format: %i\n", input_ctx->streams[s]->codecpar->format);
    printf("\t Pixel Format Name: %s\n", av_get_pix_fmt_name((AVPixelFormat) input_ctx->streams[s]->codecpar->format));
    printf("\t Metadata count: %d\n", av_dict_count(input_ctx->streams[s]->metadata));

    avcodec_free_context(&pCodecCtx);  // was leaked in the original
  }

  /* find the video stream information */
  ret = av_find_best_stream(input_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
  if (ret < 0) {
    fprintf(stderr, "Cannot find a video stream in the input file\n");
    throw std::runtime_error("Error");
  }

  video_stream = ret;

  // Find a hw config of the decoder that supports device-context decoding on
  // our backend; remember its surface format for the get_format callback.
  for (i = 0;; i++) {
    const AVCodecHWConfig * config = avcodec_get_hw_config(decoder, i);
    if (!config) {
      fprintf(stderr, "Decoder %s does not support device type %s.\n",
        decoder->name, av_hwdevice_get_type_name(type));
      throw std::runtime_error("Error");
    }
    if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
      config->device_type == type) {
      hw_pix_fmt = config->pix_fmt;
      break;
    }
  }

  if (!(decoder_ctx = avcodec_alloc_context3(decoder))) {
    throw std::runtime_error("NO MEMORY");
  }

  video = input_ctx->streams[video_stream];
  if (avcodec_parameters_to_context(decoder_ctx, video->codecpar) < 0) {
    throw std::runtime_error("Error");
  }

  decoder_ctx->get_format = get_hw_format;

  if (hw_decoder_init(decoder_ctx, type) < 0) {
    throw std::runtime_error("Error");
  }

  if ((ret = avcodec_open2(decoder_ctx, decoder, NULL)) < 0) {
    fprintf(stderr, "Failed to open codec for stream #%d\n", video_stream);
    throw std::runtime_error("Error");
  }

  /* actual decoding and dump the raw data */
  while (ret >= 0) {
    if ((ret = av_read_frame(input_ctx, packet)) < 0)
      break;

    if (video_stream == packet->stream_index)
      ret = decode_write(decoder_ctx, packet);

    av_packet_unref(packet);
  }

  /* flush the decoder */
  ret = decode_write(decoder_ctx, NULL);

  if (output_file_fd) {
    fclose(output_file_fd);
  }
  av_packet_free(&packet);
  avcodec_free_context(&decoder_ctx);
  avformat_close_input(&input_ctx);
  av_buffer_unref(&hw_device_ctx);

}
</sstream></string></iomanip></cstring></chrono></cstdlib></fstream></iostream>


-
RaspberryPi HLS streaming with nginx and ffmpeg ; v4l2 error : ioctl(VIDIOC_STREAMON) : Protocol error
22 janvier 2021, par Mirco Weber
I'm trying to realize a baby monitoring with a Raspberry Pi (Model 4B, 4GB RAM) and an ordinary Webcam (with integrated Mic).
I followed this Tutorial : https://github.com/DeTeam/webcam-stream/blob/master/Tutorial.md


Shortly described :


- 

- I installed and configured an nginx server with rtmp module enabled.
- I installed ffmpeg with this configuration —enable-gpl —enable-nonfree —enable-mmal —enable-omx-rpi
- I tried to stream ;)








The configuration of nginx seems to be working (sometimes streaming works, the server starts without any complication and when the server is up and running, the webpage is displayed).
The configuration of ffmpeg seems to be fine as well, since streaming sometimes works...


I was trying a couple of different ffmpeg-commands ; all of them are sometimes working and sometimes resulting in an error.
The command looks like following :


ffmpeg -re
-f v4l2
-i /dev/video0
-f alsa
-ac 1
-thread_queue_size 4096
-i hw:CARD=Camera,DEV=0
-profile:v high
-level:v 4.1
-vcodec h264_omx
-r 10
-b:v 512k
-s 640x360
-acodec aac
-strict
-2
-ac 2
-ab 32k
-ar 44100
-f flv
rtmp://localhost/show/stream;



Note : I rearranged the code to make it easier to read. In the terminal, it is all in one line.
Note : There is no difference when using
-f video4linux2
instead of -f v4l2


The camera is recognized by the system :


pi@raspberrypi:~ $ v4l2-ctl --list-devices
bcm2835-codec-decode (platform:bcm2835-codec):
 /dev/video10
 /dev/video11
 /dev/video12

bcm2835-isp (platform:bcm2835-isp):
 /dev/video13
 /dev/video14
 /dev/video15
 /dev/video16

HD Web Camera: HD Web Camera (usb-0000:01:00.0-1.2):
 /dev/video0
 /dev/video1



When only using
-i /dev/video0
, audio transmission never worked.
The output of arecord -L
was :

pi@raspberrypi:~ $ arecord -L
default
 Playback/recording through the PulseAudio sound server
null
 Discard all samples (playback) or generate zero samples (capture)
jack
 JACK Audio Connection Kit
pulse
 PulseAudio Sound Server
usbstream:CARD=Headphones
 bcm2835 Headphones
 USB Stream Output
sysdefault:CARD=Camera
 HD Web Camera, USB Audio
 Default Audio Device
front:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 Front speakers
surround21:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 2.1 Surround output to Front and Subwoofer speakers
surround40:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 4.0 Surround output to Front and Rear speakers
surround41:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 4.1 Surround output to Front, Rear and Subwoofer speakers
surround50:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 5.0 Surround output to Front, Center and Rear speakers
surround51:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 5.1 Surround output to Front, Center, Rear and Subwoofer speakers
surround71:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 7.1 Surround output to Front, Center, Side, Rear and Woofer speakers
iec958:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 IEC958 (S/PDIF) Digital Audio Output
dmix:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 Direct sample mixing device
dsnoop:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 Direct sample snooping device
hw:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 Direct hardware device without any conversions
plughw:CARD=Camera,DEV=0
 HD Web Camera, USB Audio
 Hardware device with all software conversions
usbstream:CARD=Camera
 HD Web Camera
 USB Stream Output



that's why i added
-i hw:CARD=Camera,DEV=0
.

As mentioned above, it worked very well a couple of times with this configuration and commands.
But very often, i get the following error message when starting to stream :


pi@raspberrypi:~ $ ffmpeg -re -f video4linux2 -i /dev/video0 -f alsa -ac 1 -thread_queue_size 4096 -i hw:CARD=Camera,DEV=0 -profile:v high -level:v 4.1 -vcodec h264_omx -r 10 -b:v 512k -s 640x360 -acodec aac -strict -2 -ac 2 -ab 32k -ar 44100 -f flv rtmp://localhost/show/stream
ffmpeg version N-100673-g553eb07737 Copyright (c) 2000-2021 the FFmpeg developers
 built with gcc 8 (Raspbian 8.3.0-6+rpi1)
 configuration: --enable-gpl --enable-nonfree --enable-mmal --enable-omx-rpi --extra-ldflags=-latomic
 libavutil 56. 63.101 / 56. 63.101
 libavcodec 58.117.101 / 58.117.101
 libavformat 58. 65.101 / 58. 65.101
 libavdevice 58. 11.103 / 58. 11.103
 libavfilter 7. 96.100 / 7. 96.100
 libswscale 5. 8.100 / 5. 8.100
 libswresample 3. 8.100 / 3. 8.100
 libpostproc 55. 8.100 / 55. 8.100
[video4linux2,v4l2 @ 0x2ea4600] ioctl(VIDIOC_STREAMON): Protocol error
/dev/video0: Protocol error



And when I'm swithing to
/dev/video1
(since this was also an output for v4l2-ctl --list-devices
), I get the following error message :

pi@raspberrypi:~ $ ffmpeg -re -f v4l2 -i /dev/video1 -f alsa -ac 1 -thread_queue_size 4096 -i hw:CARD=Camera,DEV=0 -profile:v high -level:v 4.1 -vcodec h264_omx -r 10 -b:v 512k -s 640x360 -acodec aac -strict -2 -ac 2 -ab 32k -ar 44100 -f flv rtmp://localhost/show/stream
ffmpeg version N-100673-g553eb07737 Copyright (c) 2000-2021 the FFmpeg developers
 built with gcc 8 (Raspbian 8.3.0-6+rpi1)
 configuration: --enable-gpl --enable-nonfree --enable-mmal --enable-omx-rpi --extra-ldflags=-latomic
 libavutil 56. 63.101 / 56. 63.101
 libavcodec 58.117.101 / 58.117.101
 libavformat 58. 65.101 / 58. 65.101
 libavdevice 58. 11.103 / 58. 11.103
 libavfilter 7. 96.100 / 7. 96.100
 libswscale 5. 8.100 / 5. 8.100
 libswresample 3. 8.100 / 3. 8.100
 libpostproc 55. 8.100 / 55. 8.100
[video4linux2,v4l2 @ 0x1aa4610] ioctl(VIDIOC_G_INPUT): Inappropriate ioctl for device
/dev/video1: Inappropriate ioctl for device



When using the
video0
input, the webcam's LED that recognizes an access is constantly on. When using video1
not.

After hours and days of googling and tears and whiskey, for the sake of my liver, my marriage and my physical and mental health, I'm very sincerly asking for your help...
What the f**k is happening and what can I do to make it work ???


Thanks everybody :)


UPDATE 1 :


- 

- using the full path to ffmpeg does not change anything...
- /dev/video0 and /dev/video1 have access rights for everybody
sudo ffmpeg ...
does not change anything as well- the problem seems to be at an "early stage". Stripping the command down to
ffmpeg -i /dev/video0
results in the same problem










UPDATE 2 :

It seems that everything is working when I first start another Application that needs access to the webcam and then ffmpeg...
Might be some driver issue, but when I'm looking for loaded modules with lsmod
, there is absolutely no change before and after I started the application...
Any help still appreciated...

UPDATE 3 :

I was checking the output of dmesg
.

When I started the first application I received this message :

uvcvideo: Failed to query (GET_DEF) UVC control 12 on unit 2: -32 (exp. 4).


And when I started ffmpeg
, nothing happend but everything worked...

-
FFMPEG can't find audio device for webcam on one computer
13 mars 2018, par David
I am building a webcam recording app. I am using FFMPEG to capture the audio and video. I have a Logitech C930 webcam.
>ffmpeg -list_devices true -f dshow -i dummy ffmpeg version N-90155-g28924f4b48 Copyright (c) 2000-2018 the FFmpeg developers built with gcc 7.3.0 (GCC) configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-amf —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth libavutil 56. 7.101 / 56. 7.101 libavcodec 58. 12.102 / 58. 12.102 libavformat 58. 9.100 / 58. 9.100 libavdevice 58. 2.100 / 58. 2.100 libavfilter 7. 12.100 / 7. 12.100 libswscale 5. 0.101 / 5. 0.101 libswresample 3. 0.101 / 3. 0.101 libpostproc 55. 0.100 / 55. 
0.100 [dshow @ 0000024786c38680] DirectShow video devices (some may be both video and audio devices) [dshow @ 0000024786c38680] "Microsoft Camera Front" [dshow @ 0000024786c38680] Alternative name "@device_pnp_~?\display#int3470#4&1835d135&0&uid13424#65e8773d-8f56-11d0-a3b9-00a0c9223196bf89b5a5-61f7-4127-a279-e187013d7caf" [dshow @ 0000024786c38680] "Microsoft Camera Rear" [dshow @ 0000024786c38680] Alternative name "@device_pnp_~?\display#int3470#4&1835d135&0&uid13424#65e8773d-8f56-11d0-a3b9-00a0c92231967c9bbcea-909c-47b3-8cf9-2aa8237e1d4b" [dshow @ 0000024786c38680] "Logitech Webcam C930e" [dshow @ 0000024786c38680] Alternative name "@device_pnp_~?\usb#vid_046d&pid_0843&mi_00#8&dd2d9e1&0&0000#65e8773d-8f56-11d0-a3b9-00a0c9223196\global" [dshow @ 0000024786c38680] "ChromaCam" [dshow @ 0000024786c38680] Alternative name "@device_sw_860BB310-5D01-11D0-BD3B-00A0C911CE863403512D-FE5D-4D68-B053-E86D662E4D58" [dshow @ 0000024786c38680] DirectShow audio devices [dshow @ 0000024786c38680] "Microphone (2- Logitech Webcam C930e)" [dshow @ 0000024786c38680] Alternative name "@device_cm_33D9A762-90C8-11D0-BD43-00A0C911CE86\wave_1AF7CBFB-477F-44D9-B8C4-E178F036B5D1" dummy : Immediate exit requested
Notice the "Microphone (2- Logitech Webcam C930e)" audio device. I can capture from it using FFMPEG.
Now, I move the same camera to a different computer (Intel Compute Stick) running nearly the same build of windows (16299.251 on the stick, 16299.248 on the laptop). Stick is 32-bit home and laptop is 64-bit Pro (shouldn’t matter, should it ?).
I run the same command with the exact same camera and same drivers installed :>ffmpeg -list_devices true -f dshow -i dummy ffmpeg version 3.4.2 Copyright (c) 2000-2018 the FFmpeg developers built with gcc 7.3.0 (GCC) configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth libavutil 55. 78.100 / 55. 78.100 libavcodec 57.107.100 / 57.107.100 libavformat 57. 83.100 / 57. 83.100 libavdevice 57. 10.100 / 57. 10.100 libavfilter 6.107.100 / 6.107.100 libswscale 4. 8.100 / 4. 8.100 libswresample 2. 9.100 / 2. 9.100 libpostproc 54. 7.100 / 54. 7.100 [dshow @ 00797be0] DirectShow video devices (some may be both video and audio devices) [dshow @ 00797be0] "Logitech Webcam C930e" [dshow @ 00797be0] Alternative name "@device_pnp_~?\usb#vid_046d&pid_0843&mi_00#6&dd2d9e1&0&0000#65e8773d-8f56-11d0-a3b9-00a0c9223196\global" [dshow @ 00797be0] DirectShow audio devices [dshow @ 00797be0] Could not enumerate audio only devices (or none found). dummy : Immediate exit requested
No Audio device listed ? What ? If I use python’s sounddevice module, it sees the audio device :
> python -m sounddevice 0 Microsoft Sound Mapper - Output, MME (0 in, 2 out) 1 Remote Audio, MME (0 in, 2 out) 2 Primary Sound Driver, Windows DirectSound (0 in, 2 out) 3 Remote Audio, Windows DirectSound (0 in, 2 out) 4 Remote Audio, Windows WASAPI (0 in, 2 out) 5 Microphone (Logitech Webcam C930e), Windows WDM-KS (2 in, 0 out)
I can capture from that audio device with sounddevice, but I can’t get FFMPEG to capture from it.
>ffmpeg -report -rtbufsize 50000k-f dshow -i video="Logitech Webcam C930e":audio="Microphone (Logitech Webcam C930e)" -t 120 -y -s 1920x1080 -q 2 -video_pin_name 1 -audio_pin_name 5 -filter:v setpts=1*PTS Watchmen_20180312095119_c.avi ffmpeg started on 2018-03-12 at 17:07:08 Report written to "ffmpeg-20180312-170708.log" ffmpeg version 3.4.2 Copyright (c) 2000-2018 the FFmpeg developers built with gcc 7.3.0 (GCC) configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth libavutil 55. 78.100 / 55. 78.100 libavcodec 57.107.100 / 57.107.100 libavformat 57. 83.100 / 57. 83.100 libavdevice 57. 10.100 / 57. 10.100 libavfilter 6.107.100 / 6.107.100 libswscale 4. 8.100 / 4. 8.100 libswresample 2. 9.100 / 2. 9.100 libpostproc 54. 7.100 / 54. 7.100 video=Logitech Webcam C930e:audio=Microphone (Logitech Webcam C930e) : No such file or directory
I have been trying to debug by removing and reinstalling drivers, etc, but to no avail. I also grabbed a Microsoft Lifecam webcam and got similar results. I can see the lifecam’s audio on the laptop but not the stick in just the same manner as the logitech camera.
How can I get the audio device to show up on the stick so I can capture from it ?