Recherche avancée

Médias (91)

Autres articles (14)

  • Keeping control of your media in your hands

    13 avril 2011, par

    The vocabulary used on this site and around MediaSPIP in general, aims to avoid reference to Web 2.0 and the companies that profit from media-sharing.
    While using MediaSPIP, you are invited to avoid using words like "Brand", "Cloud" and "Market".
    MediaSPIP is designed to facilitate the sharing of creative media online, while allowing authors to retain complete control of their work.
    MediaSPIP aims to be accessible to as many people as possible and development is based on expanding the (...)

  • Contribute to a better visual interface

    13 avril 2011

    MediaSPIP is based on a system of themes and templates. Templates define the placement of information on the page, and can be adapted to a wide range of uses. Themes define the overall graphic appearance of the site.
    Anyone can submit a new graphic theme or template and make it available to the MediaSPIP community.

  • Des sites réalisés avec MediaSPIP

    2 mai 2011, par

    Cette page présente quelques-uns des sites fonctionnant sous MediaSPIP.
    Vous pouvez bien entendu ajouter le vôtre grâce au formulaire en bas de page.

Sur d’autres sites (6588)

  • rtmp streaming video does not work [on hold]

    12 mai 2017, par 김동영

    I want to develop rtmp live streaming. It uses ffmpeg and sdl. First, I want to output a simple image. Packets come in but no image is output. Please let me know why the following sources do not work.
    I could not find the answer after searching for a week, so I am asking here.
    It is being developed as an iOS objective-c.
    I hope you help me.
    Have a good day

    thank you.

    <code>

    #import <libavcodec/avcodec.h>
    #import <libavformat/avformat.h>
    #import <libswscale/swscale.h>
    #import <libavfilter/avfilter.h>
    #import <libavfilter/avfiltergraph.h>
    #import <libavfilter/buffersrc.h>
    #import <libswresample/swresample.h>
    #import <SDL2/SDL.h>
    #import <UIKit/UIKit.h>

    @implementation hello2

    // Plays an RTMP live stream with FFmpeg + SDL2:
    //   demux (libavformat) -> decode video (libavcodec) ->
    //   convert to YUV420P (libswscale) -> render via an SDL streaming texture.
    //
    // NOTE(review): the whole demux/decode/render loop runs synchronously
    // inside -viewDidLoad, blocking the main thread for the lifetime of the
    // stream. On iOS that alone can prevent anything from being displayed;
    // move the loop to a background queue for real use.
    - (void)viewDidLoad {
        [super viewDidLoad];

        AVFormatContext *pFormatCtx;
        int              i, videoindex;
        AVCodecContext  *pCodecCtx;
        AVCodec         *pCodec;
        AVFrame         *pFrame, *pFrameYUV;
        uint8_t         *out_buffer;
        AVPacket        *packet;
        int              y_size;
        int              ret, got_picture;
        struct SwsContext *img_convert_ctx;
        // SDL ---------------------------
        int screen_w = 0, screen_h = 0;
        SDL_Window   *screen;
        SDL_Renderer *sdlRenderer;
        SDL_Texture  *sdlTexture;
        SDL_Rect      sdlRect;

        FILE *fp_yuv;

        av_register_all();
        avformat_network_init();
        pFormatCtx = avformat_alloc_context();

        SDL_SetMainReady();

        // Alternative test stream: rtmp://www.planeta-online.tv:1936/live/channel_4
        const char *url = "rtmp://live.hkstv.hk.lxdns.com/live/hks";
        if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) {
            printf("Couldn't open input stream.\n");
            return;
        }
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
            printf("Couldn't find stream information.\n");
            return;
        }

        // Locate the first video stream.
        videoindex = -1;
        for (i = 0; i < (int)pFormatCtx->nb_streams; i++) {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoindex = i;
                break;
            }
        }
        if (videoindex == -1) {
            printf("Didn't find a video stream.\n");
            return;
        }

        pCodecCtx = pFormatCtx->streams[videoindex]->codec;
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        if (pCodec == NULL) {
            printf("Codec not found.\n");
            return;
        }
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
            printf("Could not open codec.\n");
            return;
        }

        pFrame    = av_frame_alloc();
        pFrameYUV = av_frame_alloc();
        // Backing buffer for the converted YUV420P frame.
        out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P,
                                                             pCodecCtx->width,
                                                             pCodecCtx->height));
        avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
                       pCodecCtx->width, pCodecCtx->height);
        packet = (AVPacket *)av_malloc(sizeof(AVPacket));

        // Output Info -----------------------------
        printf("--------------- File Information ----------------\n");
        av_dump_format(pFormatCtx, 0, url, 0);
        printf("-------------------------------------------------\n");

        // Converter from the decoder's native pixel format to YUV420P.
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
                                         SWS_BICUBIC, NULL, NULL, NULL);

    #if OUTPUT_YUV420P
        fp_yuv = fopen("output.yuv", "wb+");
    #endif

        if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
            printf("Could not initialize SDL - %s\n", SDL_GetError());
            return;
        }

        screen_w = pCodecCtx->width;
        screen_h = pCodecCtx->height;
        // SDL 2.0 supports multiple windows.
        screen = SDL_CreateWindow("Simplest ffmpeg player",
                                  SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                  screen_w, screen_h,
                                  SDL_WINDOW_OPENGL);
        if (!screen) {
            printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
            return;
        }

        sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
        // IYUV: Y + U + V (3 planes)  /  YV12: Y + V + U (3 planes)
        sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV,
                                       SDL_TEXTUREACCESS_STREAMING,
                                       pCodecCtx->width, pCodecCtx->height);

        sdlRect.x = 0;
        sdlRect.y = 0;
        sdlRect.w = screen_w;
        sdlRect.h = screen_h;

        SDL_SetTextureBlendMode(sdlTexture, SDL_BLENDMODE_BLEND);
        // SDL End ----------------------

        // Main demux/decode/render loop.
        while (av_read_frame(pFormatCtx, packet) >= 0) {
            if (packet->stream_index == videoindex) {
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                if (ret < 0) {
                    printf("Decode Error.\n");
                    return;
                }

                if (got_picture) {
                    sws_scale(img_convert_ctx, (const uint8_t *const *)pFrame->data,
                              pFrame->linesize, 0, pCodecCtx->height,
                              pFrameYUV->data, pFrameYUV->linesize);

    #if OUTPUT_YUV420P
                    y_size = pCodecCtx->width * pCodecCtx->height;
                    fwrite(pFrameYUV->data[0], 1, y_size,     fp_yuv);  // Y
                    fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv);  // U
                    fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv);  // V
    #endif
                    // SDL ---------------------------
                    SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
                                         pFrameYUV->data[0], pFrameYUV->linesize[0],
                                         pFrameYUV->data[1], pFrameYUV->linesize[1],
                                         pFrameYUV->data[2], pFrameYUV->linesize[2]);
                    SDL_RenderClear(sdlRenderer);
                    SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
                    SDL_RenderPresent(sdlRenderer);
                    // SDL End -----------------------

                    // Crude pacing: 20 ms ~ 50 fps. A real player should pace
                    // on the stream's PTS instead of a fixed delay.
                    SDL_Delay(20);
                }
            }
            av_free_packet(packet);
        }

        // Flush the decoder: feed it a drain packet (data = NULL, size = 0)
        // until no more buffered frames come out. (The original re-used the
        // last, already-freed packet here, which is undefined behaviour.)
        packet->data = NULL;
        packet->size = 0;
        while (1) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0 || !got_picture)
                break;
            sws_scale(img_convert_ctx, (const uint8_t *const *)pFrame->data,
                      pFrame->linesize, 0, pCodecCtx->height,
                      pFrameYUV->data, pFrameYUV->linesize);
    #if OUTPUT_YUV420P
            int flush_y_size = pCodecCtx->width * pCodecCtx->height;
            fwrite(pFrameYUV->data[0], 1, flush_y_size,     fp_yuv);  // Y
            fwrite(pFrameYUV->data[1], 1, flush_y_size / 4, fp_yuv);  // U
            fwrite(pFrameYUV->data[2], 1, flush_y_size / 4, fp_yuv);  // V
    #endif
            // SDL ---------------------------
            SDL_UpdateTexture(sdlTexture, &sdlRect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
            SDL_RenderClear(sdlRenderer);
            SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
            SDL_RenderPresent(sdlRenderer);
            // SDL End -----------------------
            SDL_Delay(40);
        }

        // Cleanup. (The early `return`s above still leak these resources;
        // acceptable for a demo, but use a goto-cleanup pattern in real code.)
        sws_freeContext(img_convert_ctx);

    #if OUTPUT_YUV420P
        fclose(fp_yuv);
    #endif

        SDL_Quit();

        av_free(out_buffer);
        av_free(packet);
        av_frame_free(&pFrameYUV);
        av_frame_free(&pFrame);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
    }

    @end
  • FFMPEG : Generate segments with millisecond precision [on hold]

    10 septembre 2018, par Kalpit

    I’m generating 10s mp3 segments off a local udp stream with

    ffmpeg -i "udp://localhost:5000/live/stream" -f segment -segment_time 10 -map 0:a -strftime 1 "/usr/share/streamed/%Y-%m-%d-%H-%M-%S.mp3"

    However, strftime doesn't seem to support millisecond precision. Is there a way I can generate filenames with millisecond accuracy? Or is there some alternative way other than building my own ffmpeg?

  • How to reduce input audio buffer in shaka packager [on hold]

    3 octobre 2018, par Nick Chernuha

    I have a problem with live streaming. When I’m streaming a video on 10k, the video plays well, however the audio gets stuck.
    I think it is because of a big audio buffer.
    How do I reduce it ?