
Other articles (75)

  • Contribute to translation

    13 April 2011

    You can help us improve the language used in the software interface to make MediaSPIP more accessible and user-friendly. You can also translate the interface into any language, allowing it to reach new linguistic communities.
    To do this, we use the SPIP translation interface, where all the MediaSPIP language modules are available. Just subscribe to the mailing list and ask for more information about translation.
    MediaSPIP is currently available in French and English (...)

  • Permissions overridden by plugins

    27 April 2010, by

    Mediaspip core
    autoriser_auteur_modifier() so that visitors are able to edit their information on the authors page

  • HTML5 audio and video support

    10 April 2011

    MediaSPIP uses the HTML5 video and audio tags to play multimedia documents, taking advantage of the latest W3C innovations supported by modern browsers.
    For older browsers, the Flowplayer Flash player is used instead.
    The HTML5 player was created specifically for MediaSPIP: its appearance is fully customisable to match the chosen theme.
    These technologies make it possible to deliver video and sound both to conventional computers (...)

On other sites (11052)

  • How to minimize latency in ffmpeg stream Java?

    13 July 2022, by Taavi Sõerd

    I need to stream an ffmpeg video feed in Android Studio with minimal latency. The code below achieves that when playing on a Galaxy S21 Ultra, but when I play it on a Galaxy Tab the video runs as if in slow motion. When I set the buffer size to 0 I get minimal latency, but then I can't actually see the video at all, because it is completely corrupted (grey and coloured noise).

    


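    // Decode: runs on its own thread; opens the UDP stream with the bytedeco FFmpeg
    // bindings, decodes the video stream, converts frames to RGBA with swscale, and
    // displays them in an ImageView. KLV metadata packets are handled separately.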
    public class Decode implements Runnable {
public Activity activity;
AVFrame pFrameRGB;
SwsContext sws_ctx;
ByteBuffer bitmapBuffer;
Bitmap bmp;
byte[] array;
int imageViewWidth = 0;
int imageViewHeight = 0;
boolean imageChanged = true;
int v_stream_idx = -1;
int klv_stream_idx = -1;

boolean imageDrawMutex = false;

boolean imageIsSet = false;
ImageView imageView = MainActivity.getmInstanceActivity().findViewById(R.id.imageView);

String mFilename = "udp://@" + MainActivity.connectionIP;
UasDatalinkLocalSet mLatestDls;

public Decode(Activity _activity) {
    this.activity = _activity;
}

public void create_decoder(AVCodecContext codec_ctx) {
    imageChanged = true;

    // Determine required buffer size and allocate buffer
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, codec_ctx.width(),
            codec_ctx.height(), 1);
    BytePointer buffer = new BytePointer(av_malloc(numBytes));

    bmp = Bitmap.createBitmap(codec_ctx.width(), codec_ctx.height(), Bitmap.Config.ARGB_8888);

    array = new byte[codec_ctx.width() * codec_ctx.height() * 4];
    bitmapBuffer = ByteBuffer.wrap(array);

    sws_ctx = sws_getContext(
            codec_ctx.width(),
            codec_ctx.height(),
            codec_ctx.pix_fmt(),
            codec_ctx.width(),
            codec_ctx.height(),
            AV_PIX_FMT_RGBA,
            SWS_POINT,
            null,
            null,
            (DoublePointer) null
    );

    if (sws_ctx == null) {
        Log.d("app", "Can not use sws");
        throw new IllegalStateException();
    }

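    // Point pFrameRGB's data pointers and line sizes at the RGBA buffer allocated above.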
    av_image_fill_arrays(pFrameRGB.data(), pFrameRGB.linesize(),
            buffer, AV_PIX_FMT_RGBA, codec_ctx.width(), codec_ctx.height(), 1);
}

@Override
public void run() {
    Log.d("app", "Start decoder");

    int ret = -1, i = 0;
    String vf_path = mFilename;

    AVFormatContext fmt_ctx = new AVFormatContext(null);
    AVPacket pkt = new AVPacket();


    AVDictionary multicastDict = new AVDictionary();

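    // Input options for the UDP multicast source: transport, local interface, socket
    // reuse, and a small receive buffer intended to keep buffering (and latency) low.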
    av_dict_set(multicastDict, "rtsp_transport", "udp_multicast", 0);

    av_dict_set(multicastDict, "localaddr", getIPAddress(true), 0);
    av_dict_set(multicastDict, "reuse", "1", 0);

    av_dict_set(multicastDict, "buffer_size", "0.115M", 0);

    ret = avformat_open_input(fmt_ctx, vf_path, null, multicastDict);
    if (ret < 0) {
        Log.d("app", String.format("Open video file %s failed \n", vf_path));
        byte[] error_message = new byte[1024];
        int elen = av_strerror(ret, error_message, 1024);
        String s = new String(error_message, 0, 20);
        Log.d("app", String.format("Return: %d", ret));
        Log.d("app", String.format("Message: %s", s));
        throw new IllegalStateException();
    }
    
    if (avformat_find_stream_info(fmt_ctx, (PointerPointer) null) < 0) {
        //System.exit(-1);
        Log.d("app", "Stream info not found");
    }


    avformat.av_dump_format(fmt_ctx, 0, mFilename, 0);

    int nstreams = fmt_ctx.nb_streams();

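    // Find the indices of the video stream and of the KLV metadata (data) stream.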
    for (i = 0; i < fmt_ctx.nb_streams(); i++) {
        if (fmt_ctx.streams(i).codecpar().codec_type() == AVMEDIA_TYPE_VIDEO) {
            v_stream_idx = i;
        }
        if (fmt_ctx.streams(i).codecpar().codec_type() == AVMEDIA_TYPE_DATA) {
            klv_stream_idx = i;
        }
    }
    if (v_stream_idx == -1) {
        Log.d("app", "Cannot find video stream");
        throw new IllegalStateException();
    } else {
        Log.d("app", String.format("Video stream %d with resolution %dx%d\n", v_stream_idx,
                fmt_ctx.streams(v_stream_idx).codecpar().width(),
                fmt_ctx.streams(v_stream_idx).codecpar().height()));
    }

    AVCodecContext codec_ctx = avcodec_alloc_context3(null);
    avcodec_parameters_to_context(codec_ctx, fmt_ctx.streams(v_stream_idx).codecpar());


    AVCodec codec = avcodec_find_decoder(codec_ctx.codec_id());


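    // Note: "fflags"/"nobuffer" is a format (demuxer) option, so passing it to
    // avcodec_open2 below has no effect; it would need to go into the dictionary
    // given to avformat_open_input instead.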
    AVDictionary avDictionary = new AVDictionary();

    av_dict_set(avDictionary, "fflags", "nobuffer", 0);


    if (codec == null) {
        Log.d("app", "Unsupported codec for video file");
        throw new IllegalStateException();
    }
    ret = avcodec_open2(codec_ctx, codec, avDictionary);
    if (ret < 0) {
        Log.d("app", "Can not open codec");
        throw new IllegalStateException();
    }

    AVFrame frm = av_frame_alloc();

    // Allocate an AVFrame structure
    pFrameRGB = av_frame_alloc();
    if (pFrameRGB == null) {
        //System.exit(-1);
        Log.d("app", "unable to init pframergb");
    }

    create_decoder(codec_ctx);

    int width = codec_ctx.width();
    int height = codec_ctx.height();

    double fps = 15;
    

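    // Main loop: read one packet at a time, feed video packets to the decoder,
    // parse KLV packets, then drain any frames the decoder has produced.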
    while (true) {
        try {
            Thread.sleep(1);
        } catch (Exception e) {

        }

        try {
            if (av_read_frame(fmt_ctx, pkt) >= 0) {
                if (pkt.stream_index() == v_stream_idx) {
                    avcodec_send_packet(codec_ctx, pkt);

                    if (codec_ctx.width() != width || codec_ctx.height() != height) {
                        create_decoder(codec_ctx);
                        width = codec_ctx.width();
                        height = codec_ctx.height();
                    }
                }

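                // KLV metadata packet: copy the payload out of the AVPacket, decode it
                // as a UAS Datalink Local Set, and push the result to the UI thread.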
                if (pkt.stream_index() == klv_stream_idx) {

                    byte[] klvDataBuffer = new byte[pkt.size()];

                    for (int j = 0; j < pkt.size(); j++) {
                        klvDataBuffer[j] = pkt.data().get(j);
                    }

                    try {
                        KLV k = new KLV(klvDataBuffer, KLV.KeyLength.SixteenBytes, KLV.LengthEncoding.BER);
                        byte[] main_payload = k.getValue();

                        // decode the Uas Datalink Local Set from main_payload binary blob.
                        mLatestDls = new UasDatalinkLocalSet(main_payload);

                        if (mLatestDls != null) {

                            MainActivity.getmInstanceActivity().runOnUiThread(new Runnable() {
                                @RequiresApi(api = Build.VERSION_CODES.Q)
                                @Override
                                public void run() {
                                    MainActivity.getmInstanceActivity().updateKlv(mLatestDls);
                                }
                            });
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    
                }

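                // Drain decoded frames; avcodec_receive_frame returns a negative value
                // (e.g. AVERROR(EAGAIN)) once no more frames are available.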
                int wasFrameDecoded = 0;
                while (wasFrameDecoded >= 0) {
                    wasFrameDecoded = avcodec_receive_frame(codec_ctx, frm);

                    if (wasFrameDecoded >= 0) {
                        // get clip fps
                        fps = 15; //av_q2d(fmt_ctx.streams(v_stream_idx).r_frame_rate());

                        sws_scale(
                                sws_ctx,
                                frm.data(),
                                frm.linesize(),
                                0,
                                codec_ctx.height(),
                                pFrameRGB.data(),
                                pFrameRGB.linesize()
                        );

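                        // Hand the RGBA frame to the UI thread; frames are dropped while a
                        // previous draw is still in progress (imageDrawMutex is a plain flag,
                        // not a real lock, so this is only best-effort).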
                        if(!imageDrawMutex) {
                            MainActivity.getmInstanceActivity().runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    if (imageIsSet) {
                                        imageDrawMutex = true;
                                        pFrameRGB.data(0).position(0).get(array);
                                        bitmapBuffer.rewind();
                                        bmp.copyPixelsFromBuffer(bitmapBuffer);

                                        if (imageChanged) {
                                            (imageView).setImageBitmap(bmp);
                                            imageChanged = false;
                                        }

                                        (imageView).invalidate();
                                        imageDrawMutex = false;
                                    } else {
                                        (imageView).setImageBitmap(bmp);
                                        imageIsSet = true;
                                    }
                                }
                            });
                        }
                    }
                }
                av_packet_unref(pkt);

            }
        } catch (Exception e) {
            e.printStackTrace();
        }

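        // Note: this cleanup branch is never taken (if (false)), so the frame, codec
        // context and format context are never freed.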
        if (false) {
            Log.d("threads", "false");

            av_frame_free(frm);

            avcodec_close(codec_ctx);
            avcodec_free_context(codec_ctx);

            avformat_close_input(fmt_ctx);
        }
    }
}


    


    This code is running in Android Studio with Java. I'm quite new to this topic, so I'm not really sure where to start. What could be the cause of this?
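
    One hedged observation rather than a confirmed fix: in the code above, the dictionary containing "fflags"/"nobuffer" is passed to avcodec_open2, but "fflags" is a demuxer (format) option, so the input is still opened with default buffering. Below is a minimal sketch of passing low-latency options to avformat_open_input instead. It assumes the current bytedeco FFmpeg presets (package names differ in older javacv releases), and the option values are illustrative, not tuned for any particular device.

    import org.bytedeco.ffmpeg.avformat.AVFormatContext;
    import org.bytedeco.ffmpeg.avutil.AVDictionary;
    import static org.bytedeco.ffmpeg.global.avformat.avformat_open_input;
    import static org.bytedeco.ffmpeg.global.avutil.av_dict_set;

    public class LowLatencyOpen {
        // Open a network input with low-latency demuxer options set up front.
        public static AVFormatContext open(String url) {
            AVDictionary opts = new AVDictionary();
            av_dict_set(opts, "fflags", "nobuffer", 0);         // demuxer: do not buffer input packets
            av_dict_set(opts, "probesize", "32768", 0);         // probe fewer bytes before playback starts
            av_dict_set(opts, "analyzeduration", "1000000", 0); // limit stream analysis to ~1 s (microseconds)
            av_dict_set(opts, "max_delay", "0", 0);             // demuxer buffering delay, in microseconds

            AVFormatContext fmtCtx = new AVFormatContext(null);
            if (avformat_open_input(fmtCtx, url, null, opts) < 0) {
                throw new IllegalStateException("Could not open " + url);
            }
            return fmtCtx;
        }
    }

    Whether this changes the slow-motion behaviour on the Galaxy Tab is device-dependent; the Thread.sleep(1) in the read loop and the frame-dropping on the UI side can also contribute to the perceived lag.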

    


  • 4 Ways to Embed User Privacy & Data Security in Your Business

    15 July 2022, by Erin (Privacy)

    Customer analytics undeniably plays a vital role for businesses. Product improvements, interface personalisation, content improvements, and creative advertising thrive on data. 

    Yet, there’s a fine line between being a customer-centred company and a privacy-violating one. 

    Due to ubiquitous online tracking, 62% of Americans now believe that it’s impossible to go about their daily lives without companies collecting data about them. Still, despite the importance of privacy in business for consumers, companies are reluctant to act. Privacy initiatives often stay on the back burner due to perceived complexity. That’s true to some extent.

    Privacy in business does require complex technical changes to your data management. But to be a privacy-centred organisation, you also need to rethink your processes, practices, and culture.

    Here are four ways to start your journey to better user privacy and data security. 

    1. Revise Your Data Collection Process to Gain Consumer Trust 

    The public is wary of sharing data with businesses because they are suspicious of its subsequent usage. 

    However, not all data collection is bad or wrong. In many cases, you need specific data for service delivery, compliance, or good-natured personalisation. 

    That’s exactly what consumers expect. Almost half of US consumers say they’d trust a company that limits the amount of personal information requested and only asks for data relevant to its products/services. 

    By limiting data collection and offering transparent data usage terms, you can:

    • Reassure reluctant users to try your product or service — hence, boost conversions and sales. 
    • Retain existing audiences by gaining their trust, which leads to loyalty and higher customer lifetime value (CLV). 

    To gain consumers’ trust, implement proper consent and opt-out mechanisms. Then create educational materials about how you are collecting and using their data.

    2. Perform Data Mapping to Determine Where Sensitive Data Rests 

    Businesses are already dealing with an expanded cyber-security perimeter, courtesy of remote work, digital payment processing, IoT device adoption, and more. Yet 41% of executives don't think their security initiatives have kept up with these digital transformations.

    Loopholes in security eventually result in a data breach. The average cost of a data breach now stands at $4.24 million globally. The sum includes regulatory fines and containment costs, plus indirect losses in the form of reduced brand equity and market share.

    Lax data protection in business also undermines consumer trust: 87% of consumers wouldn't transact with a company if they had concerns about its security practices.

    To improve your security posture, analyse where you are storing sensitive consumer data, who has access to it (internally and externally), and how you are protecting it. Then work with cybersecurity specialists on implementing stronger consumer security mechanisms (e.g. automatic log-offs, a secure password policy, etc.) and extra internal security policies (if needed).

    At the same time, start practising data minimisation. Ensure that all collected data is:

    • Adequate – sufficient to meet your stated objectives
    • Relevant – rationally linked to those objectives
    • Limited – no unnecessary data is collected or stored
    • Timely – data is periodically reviewed and removed when no longer needed

    These principles prevent data hoarding. Also, they help improve your security posture and regulatory compliance by reducing the volume of information you need to safeguard.

    3. Do an Inventory of Your Business Tools

    Data leaks and consumer privacy breaches often occur through third parties. For example, because Google Analytics has been deemed in breach of the European GDPR in France, Austria and Italy, businesses using it are vulnerable to lawsuits (which are already happening).

    Investigate your corporate toolkit to determine “weak links” – tools with controversial privacy policies, murky data collection practices, and poor security. 

    Treat it as a journey and pick your battles. By relying on Big Tech products for years, you might have overlooked better alternatives. 

    For example:

    • Matomo is a privacy-centred Google Analytics alternative. Our web analytics is compliant with GDPR, CCPA, and other global privacy laws. Unlike Google Analytics, we don’t exploit any data you collect and provide full transparency into how and where it’s stored. Or if you want a simple analytics solution, Fathom is another great privacy-friendly option.
    • For online data storage, you can choose Proton Drive or Nextcloud (open-source). Or host your corporate data with a local cloud hosting provider to avoid cross-border data transfers.

    4. Cultivate a Privacy-Centred Corporate Culture 

    To make privacy a competitive advantage, you need every team member (at every level) to respect its importance. 

    This is a continuous process of inspiring and educating your people. Find “privacy ambassadors” who are willing to lead the conversations, educate others, and provide resources for leading the change. 

    On an operational level, incorporate privacy principles around data minimisation, bounded collection, and usage into your Code of Conduct, standard operating procedures (SOPs), and other policies. 

    Creating a privacy-centric culture takes effort, but it pays off well. Cisco estimates that for each dollar spent on privacy, an average organisation gets $2.70 in associated benefits. Almost half (47%) of organisations gain 2X returns on their privacy initiatives.

    Moving Forward with a Data Privacy Programme 

    Privacy has become a strong differentiator for brands. Consumers crave transparency and ethical data usage. Regulators mandate limited data collection and proper security mechanisms.

    But sweeping changes are hard to implement. So start small and go one step at a time. Understand which first-party data your company collects and how it is stored.

    Then look into the tools and technologies you are using for data collection. Do these provide sufficient privacy controls? How are they using data collected on your behalf? Finally, move to wider transformations pertaining to data management, cybersecurity, and cultural practices.

    Be consistent with your effort — and eventually, all the pieces will fall into place. 

  • FFMPEG sws_scale Crash on Android

    22 September 2014, by Jimmy

    I have an app that converts images to video. In Google Play I see the following crash report (the only details I get are the name of the function; I don't understand the rest):

    backtrace:
    #00 pc 0000cc78 /data/app-lib/com.myapp-1/libswscale.so (sws_scale+204)
    #01 pc 000012af /data/app-lib/com.myapp-1/libffmpeg.so (OpenImage+322)

    code around pc:
    79065c58 e58d8068 e58d2070 e58d3074 059d00b0

    The backtrace points to the function sws_scale. The code works almost all the time on my device (a Nexus 5), but I see a lot of crash reports with this issue, even from the same device model. Any idea why this could happen?

    AVFrame* OpenImage(const char* imageFileName, int W_VIDEO, int H_VIDEO, int* numBytes)
    {
       AVFormatContext *pFormatCtx;
       AVCodecContext *pCodecCtx;
       AVCodec *pCodec;
       AVFrame *pFrame;
       int frameFinished;
       uint8_t *buffer;
       AVPacket packet;
       int srcBytes;

       AVFrame* frame2 = NULL;// scaled frame
       uint8_t* frame2_buffer;
       struct SwsContext *resize;

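       // Note: av_open_input_file, av_find_stream_info, avcodec_open, avcodec_alloc_frame,
       // avpicture_get_size/avpicture_fill and av_close_input_file are old FFmpeg API names
       // that were later deprecated and removed.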
       if(av_open_input_file(&pFormatCtx, imageFileName, NULL, 0, NULL)!=0)
       {
           LOGI("Can't open image file '%s'\n", imageFileName);
           return NULL;
       }
       //dump_format(pFormatCtx, 0, imageFileName, 0);
       if (av_find_stream_info(pFormatCtx) < 0)
       {
           LOGI("Can't find stream info.");
           return NULL;
       }
       pCodecCtx = pFormatCtx->streams[0]->codec;
       pCodecCtx->pix_fmt = PIX_FMT_YUV420P;

       // Find the decoder for the video stream
       pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
       if (!pCodec)
       {
           LOGI("Codec not found\n");
           return NULL;
       }

       // Open codec
       if(avcodec_open(pCodecCtx, pCodec)<0)
       {
           LOGI("Could not open codec\n");
           return NULL;
       }
       pFrame = avcodec_alloc_frame();
       if (!pFrame)
       {
           LOGI("Can't allocate memory for AVFrame\n");
           return NULL;
       }

       // Determine required buffer size and allocate buffer
       srcBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
       buffer = (uint8_t *) av_malloc(srcBytes * sizeof(uint8_t));
       avpicture_fill((AVPicture *) pFrame, buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

       // Read frame
       if (av_read_frame(pFormatCtx, &packet) >= 0)
       {
           int ret;
    //      if(packet.stream_index != 0)
    //          continue;
           ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
           if (ret > 0)
           {
               //LOGI("Frame is decoded, size %d\n", ret);
               pFrame->quality = 4;

               // Create another frame for resized result
               frame2 = avcodec_alloc_frame();
               *numBytes = avpicture_get_size(PIX_FMT_YUV420P, W_VIDEO, H_VIDEO);
               frame2_buffer = (uint8_t *)av_malloc(*numBytes * sizeof(uint8_t));
               avpicture_fill((AVPicture*)frame2, frame2_buffer, PIX_FMT_YUV420P, W_VIDEO, H_VIDEO);

               // Get resize context
               resize = sws_getContext(pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, W_VIDEO, H_VIDEO, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
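           // Note: the return value of sws_getContext is not checked here; it can be
           // NULL (for example if the decoder reported a width/height of 0), and
           // passing a NULL context to sws_scale will crash.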

               // frame2 should be filled with resized samples
               ret = sws_scale(resize, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, frame2->data, frame2->linesize);
               sws_freeContext(resize);
           }
           else
               LOGI("Error [%d] while decoding frame: %s\n", ret, strerror(AVERROR(ret)));
       }
       av_free(pFrame);
       av_free_packet(&packet);
       avcodec_close(pCodecCtx);
       //av_free(pCodecCtx);
       av_close_input_file(pFormatCtx);
       return frame2;
    }