Advanced search

Media (0)

Keyword: - Tags -/gis

No media matching your criteria is available on this site.

Other articles (56)

  • What is an editorial

    21 June 2013, by

    Write your point of view in an article. It will be filed in a section set aside for that purpose.
    An editorial is a text-only article. Its purpose is to gather points of view in a dedicated section. A single editorial is featured on the home page; to read earlier ones, browse the dedicated section.
    You can customise the editorial creation form.
    Editorial creation form: in the case of a document of the editorial type, the (...)

  • Multilang: improving the interface for multilingual blocks

    18 February 2011, by

    Multilang is an additional plugin that is not enabled by default when MediaSPIP is initialised.
    Once it has been activated, a preconfiguration is applied automatically by MediaSPIP init so that the new feature is operational straight away. No separate configuration step is therefore required.

  • Authorisations overridden by plugins

    27 April 2010, by

    Mediaspip core
    autoriser_auteur_modifier() so that visitors can edit their own information on the authors page

On other sites (8736)

  • ffmpeg - android ndk build assembler messages bad instruction

    17 September 2016, by Madhavan Malolan

    I have compiled the latest ffmpeg source using the following build script, adapted from Telegram:

    #!/bin/bash
    #apply fix http://permalink.gmane.org/gmane.comp.video.ffmpeg.devel/203198

    function build_one {

    echo "Cleaning..."
    make clean

    echo "Configuring..."

    ./configure \
    --cc=$CC \
    --nm=$NM \
    --enable-stripping \
    --arch=$ARCH \
    --cpu=$CPU \
    --target-os=linux \
    --enable-cross-compile \
    --yasmexe=$NDK/prebuilt/darwin-x86_64/bin/yasm \
    --prefix=$PREFIX \
    --enable-pic \
    --disable-shared \
    --enable-static \
    --cross-prefix=$CROSS_PREFIX \
    --sysroot=$PLATFORM \
    --extra-cflags="-Os -DANDROID $OPTIMIZE_CFLAGS -fPIE -pie --static" \
    --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -nostdlib -lc -lm -ldl" \
    --extra-libs="-lgcc" \
    \
    --enable-version3 \
    --enable-gpl \
    \
    --disable-doc \
    --disable-avx \
    \
    --disable-everything \
    --disable-network \
    --disable-zlib \
    --disable-debug \
    --disable-programs \
    --disable-network \
    \
    --enable-pthreads \
    --enable-protocol=file \
    --enable-decoder=h264 \
    --enable-decoder=gif \
    --enable-demuxer=mov \
    --enable-demuxer=gif \
    --enable-hwaccels \
    --enable-runtime-cpudetect \
    --enable-asm \
    $ADDITIONAL_CONFIGURE_FLAG

    #echo "continue?"
    #read
    make -j8 install

    }

    NDK=~/Downloads/android-ndk-r12b/

    #arm platform
    PLATFORM=$NDK/platforms/android-9/arch-arm
    PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64
    LD=$PREBUILT/bin/arm-linux-androideabi-ld
    AR=$PREBUILT/bin/arm-linux-androideabi-ar
    NM=$PREBUILT/bin/arm-linux-androideabi-nm
    GCCLIB=$PREBUILT/lib/gcc/arm-linux-androideabi/4.9/libgcc.a
    ARCH=arm
    CC=$PREBUILT/bin/arm-linux-androideabi-gcc
    CROSS_PREFIX=$PREBUILT/bin/arm-linux-androideabi-

    #arm v5
    CPU=armv5te
    OPTIMIZE_CFLAGS="-marm -march=$CPU"
    PREFIX=./android/$CPU
    ADDITIONAL_CONFIGURE_FLAG="--disable-armv6 --disable-armv6t2 --disable-vfp --disable-neon"
    build_one

    #arm v7n
    CPU=armv7-a
    OPTIMIZE_CFLAGS="-marm -march=$CPU"
    PREFIX=./android/$CPU
    ADDITIONAL_CONFIGURE_FLAG=--enable-neon
    build_one

    #x86 platform
    PLATFORM=$NDK/platforms/android-9/arch-x86
    PREBUILT=$NDK/toolchains/x86-4.9/prebuilt/darwin-x86_64
    LD=$PREBUILT/bin/i686-linux-android-ld
    AR=$PREBUILT/bin/i686-linux-android-ar
    NM=$PREBUILT/bin/i686-linux-android-nm
    GCCLIB=$PREBUILT/lib/gcc/i686-linux-android/4.9/libgcc.a
    ARCH=x86
    CC=$PREBUILT/bin/i686-linux-android-gcc
    CROSS_PREFIX=$PREBUILT/bin/i686-linux-android-

    CPU=i686
    OPTIMIZE_CFLAGS="-march=$CPU"
    PREFIX=./android/$CPU
    ADDITIONAL_CONFIGURE_FLAG="--disable-mmx --disable-yasm"
    build_one

    I copied the contents of the generated android/ folder to jni/ffmpeg/.
    I then ran ndk-build, which fails with an error.
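
    A minimal sketch of those two steps, assuming the script above was run from the FFmpeg source tree and that the app uses the standard jni/ layout (the paths are illustrative, not taken from the original post):

    # Sketch only: copy the per-ABI install trees produced by the script
    # (PREFIX=./android/$CPU) into the app's jni/ffmpeg/ directory, then build.
    cp -r android/armv5te android/armv7-a android/i686 path/to/app/jni/ffmpeg/
    cd path/to/app
    ndk-build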

    The Android.mk file is as follows:

    LOCAL_PATH := $(call my-dir)

    LOCAL_MODULE    := avutil
    LOCAL_MODULE_FILENAME := libavutil
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libavutil.a
    include $(PREBUILT_STATIC_LIBRARY)

    include $(CLEAR_VARS)

    LOCAL_MODULE    := avformat
    LOCAL_MODULE_FILENAME := libavformat
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libavformat.a
    include $(PREBUILT_STATIC_LIBRARY)

    include $(CLEAR_VARS)

    LOCAL_MODULE    := avcodec
    LOCAL_MODULE_FILENAME := libavcodec
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libavcodec.a
    include $(PREBUILT_STATIC_LIBRARY)

    include $(CLEAR_VARS)
    LOCAL_MODULE    := avdevice
    LOCAL_MODULE_FILENAME := libavdevice
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libavdevice.a
    include $(PREBUILT_STATIC_LIBRARY)


    include $(CLEAR_VARS)
    LOCAL_MODULE    := avfilter
    LOCAL_MODULE_FILENAME := libavfilter
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libavfilter.a
    include $(PREBUILT_STATIC_LIBRARY)

    include $(CLEAR_VARS)
    LOCAL_MODULE    := postproc
    LOCAL_MODULE_FILENAME := libpostproc
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libpostproc.a
    include $(PREBUILT_STATIC_LIBRARY)


    include $(CLEAR_VARS)
    LOCAL_MODULE    := swresample
    LOCAL_MODULE_FILENAME := libswresample
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libswresample.a
    include $(PREBUILT_STATIC_LIBRARY)


    include $(CLEAR_VARS)
    LOCAL_MODULE    := swscale
    LOCAL_MODULE_FILENAME := libswscale
    LOCAL_SRC_FILES := ./ffmpeg/armv7-a/lib/libswscale.a
    include $(PREBUILT_STATIC_LIBRARY)


    include $(CLEAR_VARS)
    LOCAL_PRELINK_MODULE := false

    LOCAL_MODULE    := ffmpeg
    LOCAL_CFLAGS    := -w -std=c11 -Os -DNULL=0 -DSOCKLEN_T=socklen_t -DLOCALE_NOT_USED -D_LARGEFILE_SOURCE=1 -D_FILE_OFFSET_BITS=64
    LOCAL_CFLAGS    += -Drestrict='' -D__EMX__ -DOPUS_BUILD -DFIXED_POINT -DUSE_ALLOCA -DHAVE_LRINT -DHAVE_LRINTF -fno-math-errno
    LOCAL_CFLAGS    += -DANDROID_NDK -DDISABLE_IMPORTGL -fno-strict-aliasing -fprefetch-loop-arrays -DAVOID_TABLES -DANDROID_TILE_BASED_DECODE -DANDROID_ARMV6_IDCT -ffast-math -D__STDC_CONSTANT_MACROS
    LOCAL_CPPFLAGS  := -DBSD=1 -ffast-math -Os -funroll-loops -std=c++11
    LOCAL_LDLIBS    := -ljnigraphics -llog -lz -latomic
    LOCAL_STATIC_LIBRARIES := avformat avcodec avutil avfilter avdevice postproc swresample swscale
    LOCAL_SRC_FILES := ffmpeg.c

    include $(BUILD_SHARED_LIBRARY)

    The error thrown is:

    [armeabi-v7a] Compile thumb  : ffmpeg <= ffmpeg.c
    /var/folders/2p/0jqlz1c94t537_53qn3h8v9h0000gn/T//cc8Tnpth.s: Assembler messages:
    /var/folders/2p/0jqlz1c94t537_53qn3h8v9h0000gn/T//cc8Tnpth.s:4421: Error: bad instruction `cmovg r3,r8'
    /var/folders/2p/0jqlz1c94t537_53qn3h8v9h0000gn/T//cc8Tnpth.s:4422: Error: bad instruction `cmovg r2,r3'
    /var/folders/2p/0jqlz1c94t537_53qn3h8v9h0000gn/T//cc8Tnpth.s:4424: Error: bad instruction `cmovl r1,r3'
    /var/folders/2p/0jqlz1c94t537_53qn3h8v9h0000gn/T//cc8Tnpth.s:4426: Error: bad instruction `cmovg r3,r8'
    make: *** [path/to/app/obj/local/armeabi-v7a/objs/ffmpeg/ffmpeg.o] Error 1

    I am guessing I got the compiler wrong somewhere. I am trying to build on a Mac. Is there something I am missing? I am only trying to build for armeabi-v7a.
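
    As a hedged diagnostic (not part of the original question): cmovg/cmovl are x86 instructions, so one quick sanity check is to confirm that the static libraries referenced by Android.mk really came from the ARM pass of the script rather than the x86 pass, for example with the NDK's readelf (reusing the $PREBUILT path defined in the script above):

    # Sketch only: inspect the architecture of the prebuilt static libraries.
    $PREBUILT/bin/arm-linux-androideabi-readelf -h jni/ffmpeg/armv7-a/lib/libavcodec.a | grep 'Machine:' | sort -u
    # An ARM build should report "Machine: ARM"; an Intel value here would mean
    # the x86 output of the script ended up in the armv7-a directory.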

  • Compiling FFmpeg on OSX - "speex not found using pkg-config"

    19 September 2016, by n4zArh

    I recently had a few problems with FFmpeg and compiling it to get the library. I managed to get through all of them; however, I recently found out I need to add a Speex decoder (and possibly encoder) to my project. I built Speex from source with ./configure and make; make install (later, as I had problems, I also used Brew to install Speex). I added --enable-libspeex to my configure script, and every time I try to use it I get a "Speex not found using pkg-config" error.

    I am sure the Speex files are in the /usr/local/include and lib directories. I also added those two directories to CFLAGS and LDFLAGS, tried building Speex with and without --prefix (pointing to both /usr/ and /usr/local/), and tried modifying FFmpeg’s configure file (the require_pkg_config call for Speex), but no matter what I do the build fails, every time with the same error.

    Long story short: how do I build FFmpeg with the Speex decoder on OSX? I read somewhere that libspeex-dev might be needed, but it is available through apt-get and not Brew (unless I screwed something up). (See the sketch after the config.log tail below.)

    My build script:

    #!/bin/bash

    if [ "$NDK" = "" ]; then
       echo NDK variable not set, assuming ${HOME}/android-ndk
       export NDK=${HOME}/Library/Android/sdk/ndk-bundle
    fi

    SYSROOT=$NDK/platforms/android-16/arch-arm
    # Expand the prebuilt/* path into the correct one
    TOOLCHAIN=`echo $NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64`
    export PATH=$TOOLCHAIN/bin:$PATH

    rm -rf build/ffmpeg
    mkdir -p build/ffmpeg
    cd ffmpeg

    # Don't build any neon version for now
    for version in armv5te armv7a; do

    DEST=../build/ffmpeg
    FLAGS="--target-os=linux --cross-prefix=arm-linux-androideabi- --arch=arm"
    FLAGS="$FLAGS --sysroot=$SYSROOT"
    FLAGS="$FLAGS --enable-shared --disable-symver"
    FLAGS="$FLAGS --enable-small"
    FLAGS="$FLAGS --disable-everything"
    FLAGS="$FLAGS --enable-decoder=h264 --enable-decoder=adpcm_ima_oki --enable-decoder=adpcm_ima_ws"
    FLAGS="$FLAGS --enable-encoder=adpcm_ima_qt --enable-encoder=adpcm_ima_wav --enable-encoder=adpcm_g726"
    FLAGS="$FLAGS --enable-encoder=adpcm_g722 --enable-libspeex"

    case "$version" in
       neon)
           EXTRA_CFLAGS="-march=armv7-a -mfloat-abi=softfp -mfpu=neon"
           EXTRA_LDFLAGS="-Wl,--fix-cortex-a8"
           # Runtime choosing neon vs non-neon requires
           # renamed files
           ABI="armeabi-v7a"
           ;;
       armv7a)
           EXTRA_CFLAGS="-march=armv7-a -mfloat-abi=softfp"
           EXTRA_LDFLAGS=""
           ABI="armeabi-v7a"
           ;;
       *)
           EXTRA_CFLAGS=""
           EXTRA_LDFLAGS=""
           ABI="armeabi"
           ;;
    esac
    DEST="$DEST/$ABI"
    FLAGS="$FLAGS --prefix=$DEST"
    EXTRA_CFLAGS="$EXTRA_CFLAGS -I/usr/local/include/"
    EXTRA_LDFLAGS="$EXTRA_LDFLAGS -L/usr/local/lib"
    PKT_CONFIG_PATH="/usr/lib/pkgconfig/"
    mkdir -p $DEST
    echo $FLAGS --extra-cflags="$EXTRA_CFLAGS" --extra-ldflags="$EXTRA_LDFLAGS" > $DEST/info.txt
    ./configure $FLAGS --extra-cflags="$EXTRA_CFLAGS" --extra-ldflags="$EXTRA_LDFLAGS" | tee $DEST/configuration.txt
    [ $PIPESTATUS == 0 ] || exit 1
    rm compat/strtod.o
    rm compat/strtod.d
    make clean
    make -j4 || exit 1
    make install || exit 1

    done

    Tail of config.log:

    BEGIN /tmp/ffconf.QcYgKHFW.c
       1   #include <complex.h>
       2   #include <math.h>
       3   float foo(complex float f, complex float g) { return cabs(f * I); }
       4   int main(void){ return (int) foo; }
    END /tmp/ffconf.QcYgKHFW.c
    arm-linux-androideabi-gcc --sysroot=/Users/mgriszbacher/Library/Android/sdk/ndk-bundle/platforms/android-16/arch-arm -isysroot /Users/mgriszbacher/Library/Android/sdk/ndk-bundle/platforms/android-16/arch-arm -D_ISOC99_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -Dstrtod=avpriv_strtod -DPIC -I/usr/local/include/ -march=armv5te -std=c99 -fomit-frame-pointer -fPIC -marm -pthread -c -o /tmp/ffconf.vfjjuG7b.o /tmp/ffconf.QcYgKHFW.c
    /tmp/ffconf.QcYgKHFW.c:1:21: fatal error: complex.h: No such file or directory
    #include <complex.h>
                       ^
    compilation terminated.
    check_complexfunc cexp 1
    check_ld cc
    check_cc
    BEGIN /tmp/ffconf.QcYgKHFW.c
       1   #include <complex.h>
       2   #include <math.h>
       3   float foo(complex float f, complex float g) { return cexp(f * I); }
       4   int main(void){ return (int) foo; }
    END /tmp/ffconf.QcYgKHFW.c
    arm-linux-androideabi-gcc --sysroot=/Users/mgriszbacher/Library/Android/sdk/ndk-bundle/platforms/android-16/arch-arm -isysroot /Users/mgriszbacher/Library/Android/sdk/ndk-bundle/platforms/android-16/arch-arm -D_ISOC99_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -Dstrtod=avpriv_strtod -DPIC -I/usr/local/include/ -march=armv5te -std=c99 -fomit-frame-pointer -fPIC -marm -pthread -c -o /tmp/ffconf.vfjjuG7b.o /tmp/ffconf.QcYgKHFW.c
    /tmp/ffconf.QcYgKHFW.c:1:21: fatal error: complex.h: No such file or directory
    #include <complex.h>
                       ^
    compilation terminated.
    check_pkg_config speex speex/speex.h speex_decoder_init -lspeex
    false --exists --print-errors speex
    ERROR: speex not found using pkg-config
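
    The last two lines of the log show configure calling false instead of a real pkg-config binary, which typically means the cross-prefixed arm-linux-androideabi-pkg-config could not be found. A hedged sketch of one possible workaround (the --pkg-config value and the .pc path below are assumptions, not part of the original script):

    # Sketch only: point configure at the host pkg-config and at the directory
    # holding the speex.pc installed by "make install".
    export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig
    FLAGS="$FLAGS --pkg-config=pkg-config"
    # Note: this only fixes detection; the Speex library it finds still has to
    # be one built for the Android target, not the host OSX build.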

  • libav works with RTP stream on PC, but not on Android (Same RTP stream)

    25 September 2016, by Nitay

    I’m using libav to decode video received from a 3rd party. The video is received in an Android app and is then made into an RTP stream and published to another device.

    When I use the PC as the target device, the stream decodes properly and I see video. When I use Android (the same code, compiled for Android), the video does not decode at all.
    This happens only with the video from the 3rd party. Other video streams work fine both on the PC and on Android.

    To be clear:

    • If the stream is cast from the command line using ffmpeg -> video is displayed both on Android and on the PC
    • If the stream is cast from the Android app -> video is displayed only on the PC (the same code, compiled for different platforms)

    Libav 11.7 was compiled for Android using the following configure invocation:

    NDK=/opt/android-ndk-r12b
    SYSROOT="${NDK}/platforms/android-23/arch-arm/"
    ECFLAGS="-march=armv7-a -mfloat-abi=softfp -I /usr/local/include"
    ELDFLAGS="-Wl,--fix-cortex-a8 -L /usr/local/lib"
    ARCH_SPECIFIC="--disable-asm --arch=arm --cpu=armv7-a --cross-prefix=/opt/android-ndk-r12b/prebuilt/linux-x86_64/bin/../../../toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-"

    ./configure \
    ${ARCH_SPECIFIC} \
    --target-os=linux \
    --sysroot="$SYSROOT" \
    --extra-cflags="$ECFLAGS" \
    --extra-ldflags="$ELDFLAGS" \
    --enable-shared \
    --disable-symver

    (--disable-asm is unfortunately needed to avoid text relocations in the compiled library, which are not allowed on Android)

    Here are the libav logs from the Android side: http://pastebin.com/MDE3N7BD

    The log lines starting with LIBAV are libav messages; the ones without are my own messages wrapped around the libav calls.

    Logs from the PC side: http://pastebin.com/N0Fd18F9

    The loop that reads the frames:

           // If read frame fails (which happens), keep trying
           LOG_DEBUG("Before read frame");
            while (av_read_frame(formatContext, &packet) >= 0 && !terminate)
           {
               LOG_DEBUG1("Packet read. Size: %d", packet.size);

                this->DecodeFrame(videoStreamIndex, formatContext->streams[videoStreamIndex]->codec, &packet);

                av_free_packet(&packet);
                av_init_packet(&packet);
           }

    And here is the frame-decoding code:

    void VideoDecoder::DecodeFrame(int videoStreamIndex, AVCodecContext* streamCodec, AVPacket* packet)
    {
       static bool save_file = false;

       AVPixelFormat destinationFormat = AV_PIX_FMT_RGBA;
       LOG_DEBUG("Decoding frame!");


       if (this->isFirstFrame)
       {
           LOG_DEBUG("Creating codecs");
           this->isFirstFrame = false;
           // For parsing the packets, we first need to create the right codec
           AVCodec* h264Codec = NULL;
            // (I'm not sure why ffmpeg needs this; it has an SDP file which states exactly that, but okay)
           h264Codec = avcodec_find_decoder(AV_CODEC_ID_H264);

           // Now make a copy of the codec for us to change
           codecContext = avcodec_alloc_context3(h264Codec);
           avcodec_get_context_defaults3(codecContext, h264Codec);
           avcodec_copy_context(codecContext, streamCodec);


           // Initialize codecContext to use codec
           if (avcodec_open2(codecContext, h264Codec, NULL) >= 0)
           {
               // There's a nasty edge case here that we need to handle first
               if (streamCodec->width == 0 || streamCodec->height == 0)
               {
                    // That means the stream was initialized before any packets were sent to it, so we can't initialize
                    // any buffers without knowing their size. To tackle this we'll initialize the largest buffer
                    // we can think of

                   codecContext->width = MAX_RESOLUTION_WIDTH;
                   codecContext->height = MAX_RESOLUTION_HEIGHT;
               }

               // Instantiate new buffers
               int size = avpicture_get_size(AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);
               originalPic = av_frame_alloc();
               originalPicBuffer = (uint8_t*)(av_malloc(size));

               avpicture_fill((AVPicture*)originalPic, originalPicBuffer, AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);
           }

           // Instantiate an output context, for usage in the conversion of the picture
           outputFormatContext = avformat_alloc_context();
       }

        if ((packet->stream_index == videoStreamIndex) && !terminate)
       {
           // Packet is video. Convert!

           if (outputStream == NULL)
           {
               //create stream in file
               outputStream = avformat_new_stream(outputFormatContext, streamCodec->codec);
               avcodec_copy_context(outputStream->codec, streamCodec);
               outputStream->sample_aspect_ratio = streamCodec->sample_aspect_ratio;
           }

           int pictureReceived = 0;
           packet->stream_index = outputStream->id;
            int result = avcodec_decode_video2(codecContext, originalPic, &pictureReceived, packet);
            //          std::cout << "Bytes decoded " << result << " check " << check << std::endl;

           if (pictureReceived)
           {
               LOG_DEBUG("New frame received");
                // NOTICE: It is generally not a good practice to allocate on demand instead of on initialization.
                // In this case the edge cases demand it (what happens if width == 0 on the first packet?)
               if (this->imageConvertContext == NULL)
               {
                   // Allocate pictures and buffers for conversion
                   this->imageConvertContext = sws_getContext(
                       codecContext->width,
                       codecContext->height,
                       codecContext->pix_fmt,
                       codecContext->width,
                       codecContext->height,
                       destinationFormat,
                       SWS_BICUBIC,
                       NULL, NULL, NULL);
               }

               if (this->convertedPic == NULL)
               {
                   int size_rgba = avpicture_get_size(destinationFormat, codecContext->width, codecContext->height);
                   convertedPicBuffer = (uint8_t*)(av_malloc(size_rgba));
                   convertedPic = av_frame_alloc();
                   avpicture_fill((AVPicture*)convertedPic, convertedPicBuffer, destinationFormat, codecContext->width, codecContext->height);
               }

               // Scale the image
               sws_scale(imageConvertContext, originalPic->data, originalPic->linesize, 0, codecContext->height, convertedPic->data, convertedPic->linesize);

               // We have a frame! Callback
               if (frameReadyCallback != NULL)
               {
                   LOG_DEBUG3("Updated frame [width=%d, height=%d, ptr=0x%08x]", codecContext->width, codecContext->height, convertedPic->data[0]);
                   if (save_file)
                   {
                       save_file = false;
                       std::string filename = "/storage/emulated/0/DCIM/saved_file.rgba";
                       save_buffer_to_file((unsigned char*)convertedPic->data[0], codecContext->width * codecContext->height * 4, filename.c_str());
                       LOG_DEBUG("Exported file");
                   }
                   frameReadyCallback((char*)convertedPic->data[0], codecContext->width, codecContext->height);
               }
           }
           else
           {
               LOG_DEBUG("Packet without frame");
           }
       }
    }

    Obviously the stream from the 3rd party is somehow different, probably from a different encoder, but it works with libav (the same version) on the PC. What difference on Android could cause it not to find the frames?
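
    As a hedged diagnostic (not part of the original question): since an SDP file is already available, one way to compare the two platforms is to dump what Libav's command-line tools derive from the same session on the PC with debug logging, and set that against the Android pastebin above (the stream.sdp filename is an assumption):

    # Sketch only: record Libav's view of the RTP session on the PC side,
    # then compare the codec parameters and SPS/PPS with the Android log.
    avprobe -loglevel debug stream.sdp 2> pc_probe.log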