
Media (9)
-
Stereo master soundtrack
17 October 2011
Updated: October 2011
Language: English
Type: Audio
-
Elephants Dream - Cover of the soundtrack
17 October 2011
Updated: October 2011
Language: English
Type: Image
-
#7 Ambience
16 October 2011
Updated: June 2015
Language: English
Type: Audio
-
#6 Teaser Music
16 October 2011
Updated: February 2013
Language: English
Type: Audio
-
#5 End Title
16 October 2011
Updated: February 2013
Language: English
Type: Audio
-
#3 The Safest Place
16 October 2011
Updated: February 2013
Language: English
Type: Audio
Other articles (87)
-
Contributing to its translation
10 April 2011
You can help us improve the wording used in the software, or translate it into any new language so that it can reach new linguistic communities.
To do so, we use SPIP's translation interface, where all of MediaSPIP's language modules are available. You simply need to subscribe to the translators' mailing list to ask for more information.
At the moment, MediaSPIP is only available in French and (...)
-
Authorizations overridden by plugins
27 April 2010, by Mediaspip core
autoriser_auteur_modifier() so that visitors are able to modify their information on the authors page
-
Emballe médias: what is it for?
4 February 2011
This plugin is designed to manage sites for publishing documents of any type.
It creates "media" items, namely: a "media" is an article in the SPIP sense, created automatically when a document is uploaded, whether audio, video, image or text; only a single document can be linked to a "media" article;
On other sites (9994)
-
Converting an FFMPEG frame to an SDL_Mixer Mix_Chunk
4 September 2021, by ryan
I'm writing a source port of an old video game that uses Smacker video files for its character portraits. My program handles this by decoding the Smacker video frames with FFmpeg and converting them to SDL textures. That works fine; now I need to decode the audio into Mix_Chunks. I am new to FFmpeg and audio programming, so I'm not even sure this design is desirable. Currently I am using an mp4 file to test this code, and the audio sounds like a bunch of random noise. However, SDL_Mixer does not report an error when playing the sounds.


This is my code so far.


std::unique_ptr<Mix_Chunk, decltype(&Mix_FreeChunk)>
createAudioSample(const std::vector<std::uint8_t> &buf)
{
 // Mix_QuickLoad_RAW takes a non-const Uint8* and does not copy the buffer,
 // so the vector must outlive the returned chunk.
 return std::unique_ptr<Mix_Chunk, decltype(&Mix_FreeChunk)>
  (Mix_QuickLoad_RAW(const_cast<std::uint8_t *>(buf.data()), buf.size()),
   &Mix_FreeChunk);
}

std::vector<std::uint8_t>
audioResampling(AVCodecContext *audioCtx, AVFrame *frame,
 AVSampleFormat sampleFormat, int outChannels,
 int outSampleRate)
{
 auto inChannelLayout = (audioCtx->channels ==
 av_get_channel_layout_nb_channels(audioCtx->channel_layout))
 ? audioCtx->channel_layout : av_get_default_channel_layout(audioCtx->channels);
 int outNumbSamples = 0;
 int maxOutNumbSamples = 0;
 int inNumbSamples = frame->nb_samples;
 int outLinesize = 0;
 int outNumbChannels = 0;
 auto outChannelLayout = AV_CH_LAYOUT_MONO;
 std::uint8_t **resampledAudio = nullptr;
 int resampledSize = 0;
 std::vector<std::uint8_t> result;

 if(inChannelLayout <= 0)
 {
 logerror << "Could not get correct channel layout.\n";
 return {};
 }

 if(inNumbSamples <= 0)
 {
 logerror << "In numb samples is incorrect.\n";
 return {};
 }

 /* Resampler. */
 auto audioSwr = swr_alloc();
 av_opt_set_int(audioSwr, "in_channel_count", audioCtx->channels, 0);
 av_opt_set_int(audioSwr, "out_channel_count", 1, 0);
 av_opt_set_int(audioSwr, "in_channel_layout", inChannelLayout, 0);
 av_opt_set_int(audioSwr, "out_channel_layout", AV_CH_LAYOUT_MONO, 0);
 av_opt_set_int(audioSwr, "in_sample_rate", audioCtx->sample_rate, 0);
 av_opt_set_int(audioSwr, "out_sample_rate", outSampleRate, 0);
 av_opt_set_sample_fmt(audioSwr, "in_sample_fmt", audioCtx->sample_fmt, 0);
 av_opt_set_sample_fmt(audioSwr, "out_sample_fmt", sampleFormat, 0);
 swr_init(audioSwr);
 if(!swr_is_initialized(audioSwr))
 {
 logerror << "Could not initialize audio resampler.\n";
 goto resample_error;
 }

 maxOutNumbSamples = outNumbSamples = av_rescale_rnd(inNumbSamples,
 outSampleRate,
 audioCtx->sample_rate,
 AV_ROUND_UP);
 if(maxOutNumbSamples <= 0)
 {
 logerror << "Could not rescale the audio buffer.\n";
 goto resample_error;
 }

 outNumbChannels = av_get_channel_layout_nb_channels(outChannelLayout);
 if(auto ret = av_samples_alloc_array_and_samples(&resampledAudio, &outLinesize,
 outNumbChannels, outNumbSamples,
 sampleFormat, 0);
 ret < 0)
 {
 logerror << "Could not allocate samples and alloc: "
 << makeAVError(ret) << ".\n";
 goto resample_error;
 }

 /* Audio resampling. */
 if(auto ret = swr_convert(audioSwr, resampledAudio, outNumbSamples,
 (const std::uint8_t**)frame->data,
 frame->nb_samples);
 ret < 0)
 {
 logerror << "Could not preform the audio conversion: "
 << makeAVError(ret) << ".\n";
 goto resample_error;
 }
 else
 resampledSize = av_samples_get_buffer_size(&outLinesize, outNumbChannels,
 ret, sampleFormat, 1);

 if(resampledSize < 0)
 {
 logerror << "Could not get the resampled audio buffer size.\n";
 goto resample_error;
 }

 /* Copy the resampled data to the output buffer. */
 result.insert(result.end(), resampledAudio[0], resampledAudio[0] + resampledSize);

resample_error:
 if(audioSwr)
 swr_close(audioSwr);
 if(resampledAudio)
 av_freep(&resampledAudio[0]);
 return result;
}



In the video constructor:


auto audioStream = avFormatPtr->streams[audioStreamIndex];

 auto audioCodec = avcodec_find_decoder(audioStream->codecpar->codec_id);
 auto audioOrig = avcodec_alloc_context3(audioCodec);
 if(!audioOrig ||
 (avcodec_parameters_to_context(audioOrig, audioStream->codecpar) < 0))
 {
 logerror << "Could not open an audio context.\n";
 audioStreamIndex = -1;
 goto audio_error;
 }
 /* Since we cannot use the original codec, we have to copy it. */
 auto audioContext = avcodec_alloc_context3(audioCodec);
 if(!audioContext ||
 (avcodec_parameters_to_context(audioContext, audioStream->codecpar) < 0))
 {
 logerror << "Could not copy audio context.\n";
 audioStreamIndex = -1;
 goto audio_error;
 }

 if(avcodec_open2(audioContext, audioCodec, nullptr) != 0)
 {
 logerror << "Could not open the audio codec.\n";
 audioStreamIndex = -1;
 goto audio_error;
 }



When decoding frames:


auto audioRet = avcodec_receive_frame(audioContext, frame);
if(audioRet == 0 || audioRet == AVERROR(EAGAIN))
{
 bool gotFrame = (audioRet == 0);
 do
 {
 audioRet = avcodec_send_packet(audioContext, packet);
 } while(audioRet == AVERROR(EAGAIN));

 auto audioBuf = audioResampling(audioContext, frame, AV_SAMPLE_FMT_S16,
 1, 44100);

 if(audioBuf.empty())
 logerror << "Error in resampling. Skipping this chunk\n";
 else
 {
 _sounds[audioIndex] = graph::createAudioSample(audioBuf);

 _soundTimestamps[audioIndex] =
 av_rescale_q(frame->best_effort_timestamp,
 audioStream->time_base, AV_TIME_BASE_Q);
 }
 audioIndex++;
}
else
 loginfo << "AVError: " << makeAVError(audioRet) << '\n';



SDL_Mixer init code:


if(Mix_OpenAudio(44100, AUDIO_S16SYS, 1, 1024))
 throw std::runtime_error("Could not initialize audio.");
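
For reference, below is a minimal sketch of the conversion path described above: a single SwrContext configured to emit exactly the format handed to Mix_OpenAudio (signed 16-bit, mono, 44100 Hz), converting one decoded AVFrame into a byte buffer suitable for Mix_QuickLoad_RAW. It assumes the pre-5.1 channel-layout API that the question's code already uses; the function name frameToS16Mono44k and the per-call context creation are illustrative choices, not part of the original program.

// Minimal sketch (not the question's code): convert one decoded AVFrame into
// the raw S16 / mono / 44100 Hz bytes that a mixer opened with
// Mix_OpenAudio(44100, AUDIO_S16SYS, 1, 1024) expects for Mix_QuickLoad_RAW.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>
}
#include <cstddef>
#include <cstdint>
#include <vector>

// decodeCtx is the opened audio AVCodecContext (pre-FFmpeg-5.1 channel API).
std::vector<std::uint8_t> frameToS16Mono44k(AVCodecContext *decodeCtx, AVFrame *frame)
{
    // Input side mirrors the decoder, output side mirrors SDL_mixer.
    SwrContext *swr = swr_alloc_set_opts(
        nullptr,
        AV_CH_LAYOUT_MONO, AV_SAMPLE_FMT_S16, 44100,
        decodeCtx->channel_layout
            ? decodeCtx->channel_layout
            : av_get_default_channel_layout(decodeCtx->channels),
        decodeCtx->sample_fmt, decodeCtx->sample_rate,
        0, nullptr);
    if (!swr || swr_init(swr) < 0)
        return {};

    // Upper bound on output samples for this frame, including buffered delay.
    const int maxOut = static_cast<int>(av_rescale_rnd(
        swr_get_delay(swr, decodeCtx->sample_rate) + frame->nb_samples,
        44100, decodeCtx->sample_rate, AV_ROUND_UP));

    std::vector<std::uint8_t> out(static_cast<std::size_t>(maxOut) * sizeof(std::int16_t));
    std::uint8_t *outPlanes[1] = { out.data() };
    const std::uint8_t **in = const_cast<const std::uint8_t **>(frame->extended_data);

    const int converted = swr_convert(swr, outPlanes, maxOut, in, frame->nb_samples);
    swr_free(&swr);
    if (converted < 0)
        return {};

    out.resize(static_cast<std::size_t>(converted) * sizeof(std::int16_t)); // mono S16
    return out;
}

In a real decoder loop the SwrContext would normally be created once per stream and reused across frames; it is allocated per call here only to keep the sketch self-contained. Note also that Mix_QuickLoad_RAW does not copy its input, so the byte buffer must stay alive for as long as the chunk created from it is in use.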



-
Undefined references while using ffmpeg 2.1.1 for Android
2 December 2013, by Kernald
I'm building ffmpeg following the same pattern as halfninja's build: making ffmpeg's main available from Java through JNI. I built ffmpeg with the following options:

./configure \
--prefix=$PREFIX \
--disable-shared \
--enable-static \
--disable-doc \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--disable-doc \
--disable-symver \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--target-os=linux \
--arch=arm \
--enable-cross-compile \
--sysroot=$SYSROOT \
--extra-cflags="-Os -fpic $ADDI_CFLAGS" \
--extra-ldflags="$ADDI_LDFLAGS"$PREFIX
,TOOLCHAIN
, … being set to corresponding folders from my NDK.$ADDI_CFLAGS
is set to-marm
and$ADDI_LDFLAGS
and$ADDITIONAL_CONFIGURE_FLAGS
are both unset. The resulting static libraries are created :$ ls -1 android/arm/lib/ libavcodec.a libavdevice.a libavfilter.a libavformat.a libavutil.a libswresample.a libswscale.a pkgconfig
I expose them with an
Android.mk
file, ffmpeg being built in$NDK/sources/ffmpeg-2.1.1
:LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavdevice
LOCAL_SRC_FILES:= lib/libavdevice.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
LOCAL_SRC_FILES:= lib/libavcodec.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
LOCAL_SRC_FILES:= lib/libavformat.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
LOCAL_SRC_FILES:= lib/libswscale.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
LOCAL_SRC_FILES:= lib/libavutil.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter
LOCAL_SRC_FILES:= lib/libavfilter.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE:= libwsresample
LOCAL_SRC_FILES:= lib/libswresample.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_STATIC_LIBRARY)

So, up to there, everything looks good. Now I try to build the equivalent of the ffmpeg binary as a static library. I copied (and didn't change a single character for now) ffmpeg.c, ffmpeg.h, cmdutils.c, cmdutils.h, ffmpeg_opt.c and ffmpeg_filter.c into my jni folder. I also have two directly JNI-related files in this folder (copied from halfninja's build; I just changed the package name). Here's the relevant Android.mk:

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_C_INCLUDES := /Applications/android-ndk-r9b/sources/ffmpeg-2.1.1
LOCAL_CFLAGS := -Wdeprecated-declarations
LOCAL_MODULE := videokit
ANDROID_LIB := -landroid
LOCAL_LDLIBS += -llog -ljnigraphics -lz
LOCAL_SRC_FILES := videokit/com_rfc_video_ffmpeg_Videokit.c videokit/ffmpeg.c videokit/cmdutils.c videokit/ffmpeg_opt.c videokit/ffmpeg_filter.c
LOCAL_SHARED_LIBRARIES := libavdevice libavformat libavfilter libavcodec libwscale libavutil libswresample libswscale libpostproc
include $(BUILD_SHARED_LIBRARY)
$(call import-module,ffmpeg-2.1.1/android/arm)

Everything compiles fine, but doesn't link. Here are the first errors:

[armeabi] SharedLibrary : libvideokit.so
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/cmdutils.o: in function print_all_libs_info.constprop.5:jni/videokit/cmdutils.c:1063: error: undefined reference to 'swresample_version'
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/cmdutils.o: in function print_all_libs_info.constprop.5:jni/videokit/cmdutils.c:1063: error: undefined reference to 'swresample_configuration'
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/cmdutils.o: in function opt_default:jni/videokit/cmdutils.c:558: error: undefined reference to 'swr_get_class'
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/cmdutils.o: in function opt_default:jni/videokit/cmdutils.c:561: error: undefined reference to 'swr_alloc'
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/cmdutils.o: in function opt_default:jni/videokit/cmdutils.c:563: error: undefined reference to 'swr_free'
/Applications/android-ndk-r9b/toolchains/arm-linux-androideabi-4.6/prebuilt/darwin-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/../../../../arm-linux-androideabi/bin/ld: ./obj/local/armeabi/objs/videokit/videokit/ffmpeg_opt.o: in function show_help_default:jni/videokit/ffmpeg_opt.c:2464: error: undefined reference to 'swr_get_class'
What I don't understand is that these functions are defined and available in libswresample.a, which I'm linking to:

arm-linux-androideabi-nm /Applications/android-ndk-r9b/sources/ffmpeg-2.1.1/android/arm/lib/libswresample.a | grep -i -e swr_get_class -e swresample_version
000001d4 T swr_get_class
00000178 T swresample_version

What am I doing wrong? Is there another, maybe simpler, way to expose ffmpeg as a static library available via JNI? (I don't use halfninja's build because I need at least ffmpeg 1.1, while his is at 0.9.)
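
For what it's worth, the symbols the linker complains about are ordinary libswresample entry points, so a small host-side program that references them can confirm that a given build of the library really exports them; if that links, the problem lies in how the archives are fed to the NDK linker rather than in the library itself. A minimal sketch, assuming a desktop toolchain with pkg-config (the file name swr_check.cpp and the build command are illustrative, not part of the Android project):

// swr_check.cpp — host-side sanity check (sketch): reference the symbols the
// NDK linker reports as undefined (swresample_version, swresample_configuration,
// swr_get_class, swr_alloc, swr_free), so this links only if the libswresample
// being used actually exports them.
//
// Assumed desktop build command, not the NDK cross toolchain:
//   g++ swr_check.cpp -o swr_check $(pkg-config --cflags --libs libswresample libavutil)
extern "C" {
#include <libswresample/swresample.h>
}
#include <cstdio>

int main()
{
    std::printf("libswresample version: %u\n", swresample_version());
    std::printf("configuration: %s\n", swresample_configuration());

    const AVClass *cls = swr_get_class();
    std::printf("swr_get_class: %s\n", cls ? "ok" : "null");

    SwrContext *ctx = swr_alloc();
    std::printf("swr_alloc: %s\n", ctx ? "ok" : "null");
    swr_free(&ctx);
    return 0;
}

As a general note, GNU ld pulls members out of a static archive only to satisfy references from objects that appear before it on the link command line, so when nm shows the symbols are present in libswresample.a, the order and completeness of the library list handed to the final link step is usually the first thing worth re-checking.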
-
FFmpeg can't process videos with filenames containing emojis on Google Colab
25 November 2022, by athena
I mounted my Google Drive on Google Colab. Inside the Drive there's a video file with an emoji in its filename (for example: 20221124 [우리의식탁 W TABLE] 직접 기른 허브로 만들면 더 맛있는 허브포카치아 🌿 (8m2hNIEoXEw).mkv).

!ffmpeg -i "/content/drive/MyDrive/DOWNLOAD/20221124 [우리의식탁 W TABLE] 직접 기른 허브로 만들면 더 맛있는 허브포카치아 🌿 (8m2hNIEoXEw).mkv"



Trying to run FFmpeg gives me this error:


---------------------------------------------------------------------------
UnicodeEncodeError Traceback (most recent call last)
 in <module>
 3 video = "/content/drive/MyDrive/DOWNLOAD/20221124 [\u110B\u116E\u1105\u1175\u110B\u1174\u1109\u1175\u11A8\u1110\u1161\u11A8 W TABLE] \u110C\u1175\u11A8\u110C\u1165\u11B8 \u1100\u1175\u1105\u1173\u11AB \u1112\u1165\u1107\u1173\u1105\u1169 \u1106\u1161\u11AB\u1103\u1173\u11AF\u1106\u1167\u11AB \u1103\u1165 \u1106\u1161\u11BA\u110B\u1175\u11BB\u1102\u1173\u11AB \u1112\u1165\u1107\u1173\u1111\u1169\u110F\u1161\u110E\u1175\u110B\u1161 \uD83C\uDF3F (8m2hNIEoXEw).mkv" #@param {type: "string"}
 4 
----> 5 get_ipython().system('ffmpeg -i "$video" #-hide_banner')

4 frames
/usr/local/lib/python3.7/dist-packages/google/colab/_shell.py in system(self, *args, **kwargs)
 93 kwargs.update({'also_return_output': True})
 94 
---> 95 output = _system_commands._system_compat(self, *args, **kwargs) # pylint:disable=protected-access
 96 
 97 if pip_warn:

/usr/local/lib/python3.7/dist-packages/google/colab/_system_commands.py in _system_compat(shell, cmd, also_return_output)
 435 # stack.
 436 result = _run_command(
--> 437 shell.var_expand(cmd, depth=2), clear_streamed_output=False)
 438 shell.user_ns['_exit_code'] = result.returncode
 439 if -result.returncode in _INTERRUPTED_SIGNALS:

/usr/local/lib/python3.7/dist-packages/google/colab/_system_commands.py in _run_command(cmd, clear_streamed_output)
 189 stdin=stdin,
 190 stderr=child_pty,
--> 191 close_fds=True)
 192 # The child PTY is only needed by the spawned process.
 193 os.close(child_pty)

/usr/lib/python3.7/subprocess.py in __init__(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags, restore_signals, start_new_session, pass_fds, encoding, errors, text)
 798 c2pread, c2pwrite,
 799 errread, errwrite,
--> 800 restore_signals, start_new_session)
 801 except:
 802 # Cleanup if the child failed starting.

/usr/lib/python3.7/subprocess.py in _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session)
 1480 errread, errwrite,
 1481 errpipe_read, errpipe_write,
-> 1482 restore_signals, start_new_session, preexec_fn)
 1483 self._child_created = True
 1484 finally:

UnicodeEncodeError: 'utf-8' codec can't encode characters in position 131-132: surrogates not allowed


My Colab uses Python 3.7.15 and ffmpeg/ffprobe version N-109226-g2ad199ae31-20221125 (from https://github.com/BtbN/FFmpeg-Builds).


I tried searching for issues similar to mine here, but most of the solutions are way beyond my knowledge, and I'm not sure how to apply them to my use case.


I'd appreciate your help, thank you!