Newest 'ffmpeg' Questions - Stack Overflow
-
Decode audio and video and process both flows — ffmpeg, sdl, opencv
24 February 2012, by Eric
My goal is to process the audio and the video of an MPEG-2 file independently while keeping the two streams synchronized. The video lasts one or two minutes at most.
First, following this post, "opencv for reading videos (and do the process), ffmpeg for audio, and SDL used to play both" sounds perfect. I made some modifications to the code to account for the recent FFmpeg naming changes. Compilation with CMake on a 64-bit machine is fine, but I get an "Unsupported codec [3]" error when opening the codec. The code follows.
Second, I am looking for code that keeps the two streams synchronized.
#include "opencv/highgui.h" #include "opencv/cv.h" #ifndef INT64_C #define INT64_C(c) (c ## LL) #define UINT64_C(c) (c ## ULL) #endif extern "C"{ #include
SDL.h> #include SDL_thread.h> #include avcodec.h> #include avformat.h> } #include #include #include using namespace cv; #define SDL_AUDIO_BUFFER_SIZE 1024 typedef struct PacketQueue { AVPacketList *first_pkt, *last_pkt; int nb_packets; int size; SDL_mutex *mutex; SDL_cond *cond; } PacketQueue; PacketQueue audioq; int audioStream = -1; int videoStream = -1; int quit = 0; SDL_Surface* screen = NULL; SDL_Surface* surface = NULL; AVFormatContext* pFormatCtx = NULL; AVCodecContext* aCodecCtx = NULL; AVCodecContext* pCodecCtx = NULL; void show_frame(IplImage* img){ if (!screen){ screen = SDL_SetVideoMode(img->width, img->height, 0, 0); if (!screen){ fprintf(stderr, "SDL: could not set video mode - exiting\n"); exit(1); } } // Assuming IplImage packed as BGR 24bits SDL_Surface* surface = SDL_CreateRGBSurfaceFrom((void*)img->imageData, img->width, img->height, img->depth * img->nChannels, img->widthStep, 0xff0000, 0x00ff00, 0x0000ff, 0 ); SDL_BlitSurface(surface, 0, screen, 0); SDL_Flip(screen); } void packet_queue_init(PacketQueue *q){ memset(q, 0, sizeof(PacketQueue)); q->mutex = SDL_CreateMutex(); q->cond = SDL_CreateCond(); } int packet_queue_put(PacketQueue *q, AVPacket *pkt){ AVPacketList *pkt1; if (av_dup_packet(pkt) < 0){ return -1; } pkt1 = (AVPacketList*) av_malloc(sizeof(AVPacketList)); //pkt1 = (AVPacketList*) malloc(sizeof(AVPacketList)); if (!pkt1) return -1; pkt1->pkt = *pkt; pkt1->next = NULL; SDL_LockMutex(q->mutex); if (!q->last_pkt) q->first_pkt = pkt1; else q->last_pkt->next = pkt1; q->last_pkt = pkt1; q->nb_packets++; q->size += pkt1->pkt.size; SDL_CondSignal(q->cond); SDL_UnlockMutex(q->mutex); return 0; } static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block){ AVPacketList *pkt1; int ret; SDL_LockMutex(q->mutex); for (;;){ if( quit){ ret = -1; break; } pkt1 = q->first_pkt; if (pkt1){ q->first_pkt = pkt1->next; if (!q->first_pkt) q->last_pkt = NULL; q->nb_packets--; q->size -= pkt1->pkt.size; *pkt = pkt1->pkt; av_free(pkt1); //free(pkt1); ret = 1; break; } else if (!block){ ret = 0; break; } else{ SDL_CondWait(q->cond, q->mutex); } } SDL_UnlockMutex(q->mutex); return ret; } int audio_decode_frame(AVCodecContext *aCodecCtx, uint8_t *audio_buf, int buf_size){ static AVPacket pkt; static uint8_t *audio_pkt_data = NULL; static int audio_pkt_size = 0; int len1, data_size; for (;;){ while (audio_pkt_size > 0){ data_size = buf_size; len1 = avcodec_decode_audio3(aCodecCtx, (int16_t*)audio_buf, &data_size, &pkt); if (len1 < 0){ // if error, skip frame audio_pkt_size = 0; break; } audio_pkt_data += len1; audio_pkt_size -= len1; if (data_size <= 0){ // No data yet, get more frames continue; } // We have data, return it and come back for more later return data_size; } if (pkt.data) av_free_packet(&pkt); if (quit) return -1; if (packet_queue_get(&audioq, &pkt, 1) < 0) return -1; audio_pkt_data = pkt.data; audio_pkt_size = pkt.size; } } void audio_callback(void *userdata, Uint8 *stream, int len){ AVCodecContext *aCodecCtx = (AVCodecContext *)userdata; int len1, audio_size; static uint8_t audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]; static unsigned int audio_buf_size = 0; static unsigned int audio_buf_index = 0; while (len > 0){ if (audio_buf_index >= audio_buf_size){ // We have already sent all our data; get more audio_size = audio_decode_frame(aCodecCtx, audio_buf, sizeof(audio_buf)); if(audio_size < 0){ // If error, output silence audio_buf_size = 1024; // arbitrary? 
memset(audio_buf, 0, audio_buf_size); } else{ audio_buf_size = audio_size; } audio_buf_index = 0; } len1 = audio_buf_size - audio_buf_index; if (len1 > len) len1 = len; memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1); len -= len1; stream += len1; audio_buf_index += len1; } } void setup_ffmpeg(char* filename) { if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0){ fprintf(stderr, "FFmpeg failed to open file %s!\n", filename); exit(-1); } if (av_find_stream_info(pFormatCtx) < 0){ fprintf(stderr, "FFmpeg failed to retrieve stream info!\n"); exit(-1); } // Dump information about file onto standard error av_dump_format(pFormatCtx, 0, filename, 0); // Find the first video stream int i = 0; for (i; i < pFormatCtx->nb_streams; i++){ if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0){ videoStream = i; } if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && audioStream < 0){ audioStream = i; } } if (videoStream == -1){ fprintf(stderr, "No video stream found in %s!\n", filename); exit(-1); } if (audioStream == -1){ fprintf(stderr, "No audio stream found in %s!\n", filename); exit(-1); } // Get a pointer to the codec context for the audio stream aCodecCtx = pFormatCtx->streams[audioStream]->codec; // Set audio settings from codec info SDL_AudioSpec wanted_spec; wanted_spec.freq = aCodecCtx->sample_rate; wanted_spec.format = AUDIO_S16SYS; wanted_spec.channels = aCodecCtx->channels; wanted_spec.silence = 0; wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; wanted_spec.callback = audio_callback; wanted_spec.userdata = aCodecCtx; SDL_AudioSpec spec; if (SDL_OpenAudio(&wanted_spec, &spec) < 0){ fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError()); exit(-1); } AVCodec* aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if (!aCodec){ fprintf(stderr, "Unsupported codec [1]!\n"); exit(-1); } avcodec_open(aCodecCtx, aCodec); // audio_st = pFormatCtx->streams[index] packet_queue_init(&audioq); SDL_PauseAudio(0); // Get a pointer to the codec context for the video stream pCodecCtx = pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream AVCodec* pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if (pCodec == NULL){ fprintf(stderr, "Unsupported codec [2]!\n"); exit(-1); // Codec not found } // Open codec if (avcodec_open(pCodecCtx, pCodec) < 0){ fprintf(stderr, "Unsupported codec [3]!\n"); exit(-1); // Could not open codec } } int main(int argc, char* argv[]) { if (argc < 2){ std::cout << "Usage: " << argv[0] << " -
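On the synchronization question, a minimal sketch of the usual approach (the audio-clock technique from the classic FFmpeg/SDL tutorials), meant to sit alongside the code above and relying on the headers it already includes: keep a clock updated from the audio packets' PTS, and delay each video frame until its own PTS catches up with that clock. The names audio_clock, update_audio_clock and wait_for_frame are illustrative helpers, not part of the posted code or of the FFmpeg API.

/* Sketch only; error handling and B-frame reordering are ignored. */
static double audio_clock = 0.0;   /* seconds of audio handed to SDL so far */

/* call from audio_decode_frame() whenever a new packet is dequeued */
static void update_audio_clock(AVPacket *pkt, AVStream *audio_st)
{
    if (pkt->pts != AV_NOPTS_VALUE)
        audio_clock = pkt->pts * av_q2d(audio_st->time_base);
}

/* call in the video loop just before show_frame() */
static void wait_for_frame(AVPacket *pkt, AVStream *video_st)
{
    double video_pts = pkt->pts * av_q2d(video_st->time_base); /* frame time in seconds */
    double delay = video_pts - audio_clock;                    /* how far ahead of the audio the frame is */
    if (delay > 0)
        SDL_Delay((Uint32)(delay * 1000.0));                   /* hold the frame back until the audio catches up */
}

-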
FFMPEG: How to transcode the attached picture (APIC) from a source mp3 file to a destination mp3 file using FFMPEG?
24 February 2012, by javanetbeans
I have an input file, 02.mp3, and I want to convert it to an mp3 file at a given bit rate. While doing so, I want to preserve all the metadata, and the APIC (the attached picture) should also be transferred to the destination file. I am using FFMPEG with the following command...
ffmpeg -y -i 02.mp3 -id3v2_version 3 -ab 128000 -ss 0 -acodec libmp3lame -f mp3 -ac 2 -ar 44100 output.mp3
Source file: 02.mp3, destination file: output.mp3.
But the destination file does not contain the APIC (the picture attached to 02.mp3). I get all the other mp3 tags in output.mp3 except the APIC. How can I get the APIC into the destination file as well?
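For what it's worth, one commonly suggested approach, assuming an FFMPEG build new enough to expose the ID3v2 attached picture as its own video stream: map the audio and the picture streams explicitly, re-encode the audio and stream-copy the picture, along the lines of

ffmpeg -y -i 02.mp3 -map 0:a -map 0:v -acodec libmp3lame -ab 128000 -ac 2 -ar 44100 -vcodec copy -id3v2_version 3 -f mp3 output.mp3

Whether this works depends on the FFMPEG version; older builds may not expose the APIC as a stream at all, in which case it cannot be copied this way.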
-
So confused: why is my build of libffmpeg.so > 17 MB?
24 February 2012, by ghost
I built FFmpeg for Android on Windows XP and Scientific Linux. The FFmpeg in question is the one used by Dolphin Player, an open source video player (http://code.google.com/p/dolphin-player/); I only rebuilt FFmpeg, and it seems to be the same as in RockPlayer 1.7.0, since both use the build_android.sh script below. The build works on both Windows XP and Linux and successfully produces bin/ffmpeg at less than 5 MB, but libffmpeg.so comes out larger than 17 MB. When I put that libffmpeg.so into the Dolphin Player libs directory, the player does not work. 17 MB is far too large: the original libffmpeg.so shipped in the Dolphin Player libs is less than 5 MB. Please give some advice.
#!/bin/bash
######################################################
# FFmpeg builds script for Android+ARM platform
#
# This script is released under term of
#   CDDL (http://www.opensource.org/licenses/cddl1)
# Wrote by pinxue (~@gmail.com) from RockPlayer.com
#   2010-8 ~ 2011-4
######################################################
######################################################
# Usage:
#   put this script in top of FFmpeg source tree
#   ./build_android
#
#   It generates binary for following architectures:
#     ARMv6
#     ARMv6+VFP
#     ARMv7+VFPv3-d16 (Tegra2)
#     ARMv7+Neon (Cortex-A8)
#
# Customizing:
#   1. Feel free to change ./configure parameters for more features
#   2. To adapt other ARM variants
#        set $CPU and $OPTIMIZE_CFLAGS
#        call build_one
######################################################

export TMPDIR=D:/tmp/android
export NDK=D:/android-ndk-r4
#PLATFORM=$NDK/build/platforms/android-8/arch-arm/
PLATFORM=$NDK/build/platforms/android-8/arch-arm
#PREBUILT=$NDK/build/prebuilt/darwin-x86/arm-eabi-4.4.0
PREBUILT=$NDK/build/prebuilt/windows/arm-eabi-4.4.0

function build_one
{
# -fasm : required. Android header file uses asm keyword instead of __asm__ , but most of c dialect (like ansi,c99,gnu99) implies -fno-asm.
#   ~/android/android-ndk-r4/build/platforms/android-5/arch-arm//usr/include/asm/byteorder.h: In function '___arch__swab32':
#   ~/android/android-ndk-r4/build/platforms/android-5/arch-arm//usr/include/asm/byteorder.h:25: error: expected ')' before ':' token
# -fno-short-enums : optimized. Else FFmpeg obj will generate a huge number of warning for variable-size enums,
#   though we may suppress them by --no-enum-size-warning, it would be better to avoid it.
#   .../ld: warning: cmdutils.o uses variable-size enums yet the output is to use 32-bit enums; use of enum values across objects may fail
# --extra-libs="-lgcc" : required. Else cannot solve some runtime function symbols
#   ... undefined reference to `__aeabi_f2uiz'
# --enable-protocols : required. Without this option, the file open always fails mysteriously.
#   FFmpeg's av_open_input_file will invoke file format probing functions, but because most of useful demuxers has flag of zero
#   which cause them are ignored during file format probling and fall to url stream parsing,
#   if protocols are disabled, the file:// url cannot be opened as well.
# $PREBUILT/bin/arm-eabi-ar d libavcodec/libavcodec.a inverse.o : required.
#   FFmpeg includes two copies of inverse.c both in libavutil and libavcodec for performance consideration (not sure the benifit yet)
#   Without this step, final ld of generating libffmpeg.so will fail silently, if invoke ld through gcc, gcc will collect more reasonable error message.
# -llog: debug only, FFmpeg itself doesn't require it at all.
#   With this option, we may simply includes "utils/Log.h" and use LOGx() to observe FFmpeg's behavior
#   PS, it seems the toolchain implies -DNDEBUG somewhere, it would be safer to use following syntax
#     #ifdef NDEBUG
#     #undef NDEBUG
#     #define HAVE_NDEBUG
#     #endif
#     #include "utils/Log.h"
#     #ifdef HAVE_NDEBUG
#     #define NDEBUG
#     #undef HAVE_NDEBUG
#     #endif
# --whole-archive : required. Else ld generate a small .so file (about 15k)
# --no-stdlib : required. Android doesn't use standard c runtime but invited its own wheal (bionic libc) because of license consideration.
# space before \ of configure lines: required for some options. Else next line will be merged into previous lines's content and cause problem.
#   Especially the --extra-cflags, the next line will pass to gcc in this case and configure will say gcc cannot create executable.
# many options mentioned by articles over internet are implied by -O2 or -O3 already, need not repeat at all.
# two or three common optimization cflags are omitted because not sure about the trade off yet. invoke NDK build system with V=1 to find them.
# -Wl,-T,$PREBUILT/arm-eabi/lib/ldscripts/armelf.x mentioned by almost every articles over internet, but it is not required to specify at all.
# -Dipv6mr_interface=ipv6mr_ifindex : required. Android inet header doesn't use ipv6mr_interface which is required by rfc, seems it generate this user space header file directly from kernel header file, but Linux kernel has decided to keep its own name for ever and ask user space header to use rfc name.
# HAVE_SYS_UIO_H : required. Else:
#   In file included from ~/android/android-ndk-r4/build/platforms/android-5/arch-arm//usr/include/linux/socket.h:29,
#     from ~/android/android-ndk-r4/build/platforms/android-5/arch-arm//usr/include/sys/socket.h:33,
#     from libavformat/network.h:35,
#     from libavformat/utils.c:46:
#   ~/android/android-ndk-r4/build/platforms/android-5/arch-arm//usr/include/linux/uio.h:19: error: redefinition of 'struct iovec'
#
# --disable-doc : required because of strange bug of toolchain.
#
# #--extra-ldflags=-Wl,-T,$PREBUILT/arm-eabi/lib/ldscripts/armelf.x -Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -nostdlib $PREBUILT/lib/gcc/arm-eabi/4.4.0/crtbegin.o $PREBUILT/lib/gcc/arm-eabi/4.4.0/crtend.o -lc -lm -ldl"
#
./configure --target-os=linux \
    --prefix=$PREFIX \
    --enable-cross-compile \
    --extra-libs="-lgcc" \
    --arch=arm \
    --cc=$PREBUILT/bin/arm-eabi-gcc \
    --cross-prefix=$PREBUILT/bin/arm-eabi- \
    --nm=$PREBUILT/bin/arm-eabi-nm \
    --sysroot=$PLATFORM \
    --extra-cflags=" -O3 -fpic -DANDROID -DHAVE_SYS_UIO_H=1 -Dipv6mr_interface=ipv6mr_ifindex -fasm -Wno-psabi -fno-short-enums -fno-strict-aliasing -finline-limit=300 $OPTIMIZE_CFLAGS " \
    --disable-shared \
    --enable-static \
    --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -nostdlib -lc -lm -ldl -llog" \
    --enable-parsers \
    --disable-encoders \
    --enable-decoders \
    --disable-muxers \
    --enable-demuxers \
    --enable-swscale \
    --disable-ffplay \
    --disable-ffprobe \
    --disable-ffserver \
    --enable-network \
    --enable-indevs \
    --disable-bsfs \
    --disable-filters \
    --enable-protocols \
    --enable-asm \
    --disable-doc \
    $ADDITIONAL_CONFIGURE_FLAG

##make clean
make -j4 install

$PREBUILT/bin/arm-eabi-ar d libavcodec/libavcodec.a inverse.o

$PREBUILT/bin/arm-eabi-ld -rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -soname libffmpeg.so -shared -nostdlib -z,noexecstack -Bsymbolic --whole-archive --no-undefined -o $PREFIX/libffmpeg.so libavcodec/libavcodec.a libavformat/libavformat.a libavutil/libavutil.a -lc -lm -lz -ldl -llog --warn-once --dynamic-linker=/system/bin/linker $PREBUILT/lib/gcc/arm-eabi/4.4.0/libgcc.a
}

#arm v6
CPU=armv6
OPTIMIZE_CFLAGS="-marm -march=$CPU"
PREFIX=./android/$CPU
ADDITIONAL_CONFIGURE_FLAG=
build_one

#arm v7vfpv3
CPU=armv7-a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfpv3-d16 -marm -march=$CPU "
PREFIX=./android/$CPU
ADDITIONAL_CONFIGURE_FLAG=
build_one

#arm v7vfp
CPU=armv7-a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU "
PREFIX=./android/$CPU-vfp
ADDITIONAL_CONFIGURE_FLAG=
build_one

#arm v7n
CPU=armv7-a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -march=$CPU -mtune=cortex-a8"
PREFIX=./android/$CPU-neon
ADDITIONAL_CONFIGURE_FLAG=--enable-neon
build_one

#arm v6+vfp
CPU=armv6
OPTIMIZE_CFLAGS="-DCMP_HAVE_VFP -mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU"
PREFIX=./android/${CPU}_vfp
ADDITIONAL_CONFIGURE_FLAG=
build_one
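One thing worth checking first, offered as a guess rather than a verified diagnosis: the arm-eabi-ld step above keeps all symbol and debug information in libffmpeg.so, and nothing in the script strips the result, while the small libffmpeg.so shipped with the players has presumably been stripped. A sketch of the extra step, reusing the $PREBUILT and $PREFIX variables already defined in the script:

# after build_one: drop symbol/debug information the player never loads
$PREBUILT/bin/arm-eabi-strip --strip-unneeded $PREFIX/libffmpeg.so

Adding --disable-debug to the ./configure line is another common way to keep the intermediate objects smaller.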
-
FFmpeg AVFrame to OpenGL texture without YUV to RGB soft conversion
23 February 2012, by zebeurton
I want to decode an encrypted H264 video file on iOS. I have already ported our decryption algorithm and it works fine. However, we cannot use the H264 hardware decoder directly because the SDK does not expose an API for it.
So I am trying to find an alternative way to decode H264 video. I am trying to use FFmpeg to decode these videos, even though there are some possible LGPL license issues. I can decode H264 video without any problem and I render the frames through an OpenGL ES texture. But there are some performance issues: I instrumented my code, and the bottleneck is FFmpeg's rescaling and YUV to RGB conversion. I know that I can use OpenGL ES 2.0 shaders to convert YUV to RGB with GPU acceleration (related post: Alternative to ffmpeg for iOS). I also know how the AVFrame structure is composed: data[0] holds the Y plane, data[1] the U plane and data[2] the V plane. But I do not understand how to use linesize[x] together with data[x] to transfer the data to an OpenGL texture.
Does anybody have an example of AVFrame YUV to OpenGL texture?
Thanks, David
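Not an exact answer, but a minimal sketch of the technique usually used here, under the assumption of a YUV 4:2:0 AVFrame and an OpenGL ES 2.0 context: upload each plane into its own single-channel (GL_LUMINANCE) texture and let the fragment shader do the YUV-to-RGB math. linesize[i] is the stride of plane i in bytes and may be larger than the visible width; since OpenGL ES 2.0 has no GL_UNPACK_ROW_LENGTH, padded rows are uploaded one at a time. upload_plane and upload_frame below are illustrative helpers, not FFmpeg or OpenGL API functions.

#include <OpenGLES/ES2/gl.h>    /* iOS OpenGL ES 2.0 header */
#include <libavcodec/avcodec.h> /* AVFrame */

/* Upload one 8-bit plane (Y, U or V) into a luminance texture,
 * honouring the plane's stride (linesize). */
static void upload_plane(GLuint tex, const uint8_t *data,
                         int width, int height, int linesize)
{
    glBindTexture(GL_TEXTURE_2D, tex);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    if (linesize == width) {
        /* tightly packed: one call uploads the whole plane */
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
    } else {
        /* padded rows: allocate the texture, then upload row by row */
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
        for (int y = 0; y < height; y++)
            glTexSubImage2D(GL_TEXTURE_2D, 0, 0, y, width, 1,
                            GL_LUMINANCE, GL_UNSIGNED_BYTE,
                            data + y * linesize);
    }

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

/* For a YUV 4:2:0 AVFrame: Y is full size, U and V are half size. */
static void upload_frame(GLuint tex[3], const AVFrame *frame, int w, int h)
{
    upload_plane(tex[0], frame->data[0], w,     h,     frame->linesize[0]); /* Y */
    upload_plane(tex[1], frame->data[1], w / 2, h / 2, frame->linesize[1]); /* U */
    upload_plane(tex[2], frame->data[2], w / 2, h / 2, frame->linesize[2]); /* V */
}

The three textures are then bound to three samplers in the fragment shader, which reconstructs RGB from the sampled Y, U and V values.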