
Recherche avancée
Médias (39)
-
Stereo master soundtrack
17 octobre 2011, par
Mis à jour : Octobre 2011
Langue : English
Type : Audio
-
ED-ME-5 1-DVD
11 octobre 2011, par
Mis à jour : Octobre 2011
Langue : English
Type : Audio
-
1,000,000
27 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
Demon Seed
26 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
The Four of Us are Dying
26 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
Corona Radiata
26 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
Autres articles (75)
-
Submit enhancements and plugins
13 avril 2011If you have developed a new extension to add one or more useful features to MediaSPIP, let us know and its integration into the core MediaSPIP functionality will be considered.
You can use the development discussion list to request help with creating a plugin. As MediaSPIP is based on SPIP, you can also use the SPIP discussion list, SPIP-Zone. -
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, parCertains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond ;
-
Encoding and processing into web-friendly formats
13 avril 2011, parMediaSPIP automatically converts uploaded files to internet-compatible formats.
Video files are encoded in MP4, Ogv and WebM (supported by HTML5) and MP4 (supported by Flash).
Audio files are encoded in MP3 and Ogg (supported by HTML5) and MP3 (supported by Flash).
Where possible, text is analyzed in order to retrieve the data needed for search engine detection, and then exported as a series of image files.
All uploaded files are stored online in their original format, so you can (...)
Sur d’autres sites (6682)
-
ffmpeg shutting down when stream is silent
7 février 2017, par chr_lt_neyI am running ffmpeg in a Linux environment, where I merge the audio stream from an Icecast-stream and a static image in order to be able to stream that to Youtube.
At times, the Icecast stream gets very silent, sometimes for almost a minute. That is intended to happen, due to the nature of the content. But, when the stream gets too silent, ffmpeg just shuts down with no notice.
This is how I start ffmpeg :
ffmpeg -loop 1 -r 30 -s 1280x720 -i /var/www/html/files/youtube/image.png -i http://my-stream-to-icecast:8000/my-mount -c:v libx264 -preset fast -tune stillimage -crf 18 -c:a copy -shortest -pix_fmt yuv420p -f flv rtmp://a.rtmp.youtube.com/live2/my-youtube-key
Any ideas on how to solve this ? And/or how to optimize the above ?
Thanks a lot !
-
Ffmpeg stream-loop doesn't stream
7 juin 2023, par Jorgen TimareyI'm trying to stream a generated PNG (one every second) to Twitch with this command, but it streams only the first PNG generated and not the next ones.


ffmpeg -re -stream_loop -1 -i "$output_directory/screenshot.png" -c:v libx264 -preset ultrafast -tune zerolatency -crf 23 -maxrate 1M -bufsize 2M -g 60 -f flv "$output_url"



-
Livestream playback on Hololens2
20 avril 2023, par Денис ЧерныйI have encountered the following problem
My task is as follows, I need to play streaming video (raw h264 video over UDP protocol) on a 3d object. At the moment I'm using FFmpegInteropX to set a MediaSource to a Windows object.Media.Playback.MediaPlayer.
Media Player works in frame server mode, then I subscribe to the videoFrameAvailable event and transmit the resulting frame to Unity


The problem is that the performance on Hololens2 (UWP) is quite low, I can't get enough smoothness and low latency if I use texture sizes greater than 720x720. At the same time, if I run the application on a PC, I can play everything up to 4096x4096 smoothly and without delay.
Perhaps someone has some ideas on how to improve performance on Hololens2 ?



private SoftwareBitmap frameServerDest = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 1024,1024,BitmapAlphaMode.Premultiplied );
private UnityEngine.Texture2D tex = new UnityEngine.Texture2D(frameServerDest.PixelWidth, frameServerDest.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);

private async void InitializeMediaPlayer(){
FFmpegInteropLogging.SetDefaultLogProvider();
 FFmpegInteropConfig configuration = new FFmpegInteropConfig()
 {
 MaxVideoThreads = 8,
 SkipErrors = uint.MaxValue,
 DefaultBufferTime = TimeSpan.Zero,
 FastSeek = true,
 VideoDecoderMode = VideoDecoderMode.ForceFFmpegSoftwareDecoder,
 };
 configuration.FFmpegOptions.Add("tune", "zerolatency");
 configuration.FFmpegOptions.Add("flags", "low_delay");
 configuration.FFmpegOptions.Add("fflags", "discardcorrupt+shortest+sortdts+ignidx+nobuffer");
 decoder = await FFmpegInteropMSS.CreateFromUriAsync("udp://127.0.0.1:9005",configuration)

 var mediaStreamSource = decoder.GetMediaStreamSource();
 mediaStreamSource.BufferTime = TimeSpan.FromSeconds(0);
 Debug.WriteLine($"{decoder.CurrentVideoStream.CodecName} {decoder.CurrentVideoStream.DecoderEngine} {decoder.CurrentVideoStream.HardwareDecoderStatus} {decoder.CurrentVideoStream.PixelWidth} x {decoder.CurrentVideoStream.PixelHeight}");
 var FrameServer = new Windows.Media.Playback.MediaPlayer() { IsVideoFrameServerEnabled = true };
 FrameServer.Source = MediaSource.CreateFromMediaStreamSource(mediaStreamSource);
 FrameServer.RealTimePlayback = true;
 FrameServer.VideoFrameAvailable += MediaPlayer_VideoFrameAvailable;
 FrameServer.Play();
}

//FrameAvailable:
 private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
 {
 CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice();
 using (CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDest))
 {
 
 sender.CopyFrameToVideoSurface(canvasBitmap);
 byte[] bytes = canvasBitmap.GetPixelBytes();

 if (AppCallbacks.Instance.IsInitialized())
 {
 AppCallbacks.Instance.InvokeOnAppThread(() =>
 {

 tex.LoadRawTextureData(bytes);
 tex.Apply();
 Display.GetComponent().texture = tex;
 }, false);
 }
 GC.Collect();
 }
 }



My FFmpeg output setting

ffmpeg -r 60 -f gdigrab -i desktop -f h264 -framerate 60 -vcodec libx264 -preset ultrafast -tune zerolatency -threads 8 -thread_type slice udp://127.0.0.1:9005


UPDATE :
Hello, I did some work.
What I've done :


- 

- I have established a direct connection via usb-c - ethernet
- I begin to look towards using directx surface






I found the following way to get d3d11 device using by Unity
For this I had to use the library SharpDX, and similar thread https://forum.unity.com/threads/d3d11-texture2d-blitting-framerate.562552


But there are problems that I can't solve yet :
1 FFmpeg works only in the VideoDecoderMode = VideoDecoderMode.Automatic or VideoDecoderMode.ForceFFmpegSoftwareDecoder mode ;
2 In the event handler (videoframe_available), there is still a very large load on the Garbage Collector, and apparently this causes performance problems. Moreover, the performance suffers only on Hololens.
In other VideoDecoderModes, the stream parameters are determined, but the videoframe_available event never fires. Latency is approximately zero, but performance is still not very good


Perhaps there are ideas on how to solve the problem with the GarbageCollector ?



private SoftwareBitmap frameServerDist = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 780, 780, 
 BitmapAlphaMode.Premultiplied);
 private FFmpegInteropMSS decoder;

 private UnityEngine.GameObject Display;
 private UnityEngine.Texture2D targetTexture;
 private UnityEngine.GameObject MainCamera;
 private SharpDX.Direct3D11.Device dstDevice;
 private SharpDX.Direct3D11.DeviceContext dstContenxt;
 private SharpDX.Direct3D11.Texture2D m_DstTexture;
 private SharpDX.Direct3D11.Device srcDevice;
 private SharpDX.Direct3D11.DeviceContext srcContext;
 private static DataRectangle _rect;

 private SharpDX.Direct3D11.Texture2DDescription Texture2DDescription = new SharpDX.Direct3D11.Texture2DDescription()
 {

 ArraySize = 1,
 BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource,
 Usage = SharpDX.Direct3D11.ResourceUsage.Immutable, //GPU Only
 CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
 Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
 MipLevels = 1,
 OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
 SampleDescription = new SharpDX.DXGI.SampleDescription()
 {
 Count = 1,
 Quality = 0
 }
 };





//This event occurs when UnityEngine Initialized 
 private void AppCallbacks_Initialized()
 {

 srcDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
 srcContext = srcDevice.ImmediateContext;

 UnityEngine.WSA.Application.InvokeOnAppThread(() =>
 {
 Display = UnityEngine.GameObject.Find("Display");
 targetTexture = null;
 //Create texture for get Device and Device context 
 UnityEngine.Texture2D deviceTexture = new UnityEngine.Texture2D(frameServerDist.PixelWidth, frameServerDist.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);
 IntPtr txPtr = deviceTexture.GetNativeTexturePtr();
 SharpDX.Direct3D11.Texture2D dstTextureX = new SharpDX.Direct3D11.Texture2D(txPtr);
 dstDevice = dstTextureX.Device;
 dstContenxt = dstDevice.ImmediateContext;
 //Create sharedResource
 SharpDX.Direct3D11.Texture2DDescription sharedTextureDesc = dstTextureX.Description;
 sharedTextureDesc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
 m_DstTexture = new SharpDX.Direct3D11.Texture2D(dstDevice, sharedTextureDesc);

 SharpDX.Direct3D11.ShaderResourceViewDescription rvdesc = new SharpDX.Direct3D11.ShaderResourceViewDescription
 {
 Format = sharedTextureDesc.Format,
 Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D
 };
 rvdesc.Texture2D.MostDetailedMip = 0; rvdesc.Texture2D.MipLevels = 1;
 SharpDX.Direct3D11.ShaderResourceView rvptr = new SharpDX.Direct3D11.ShaderResourceView(
 dstDevice,
 m_DstTexture, rvdesc);

 targetTexture = UnityEngine.Texture2D.CreateExternalTexture(sharedTextureDesc.Width, sharedTextureDesc.Height, UnityEngine.TextureFormat.BGRA32, false, false, rvptr.NativePointer);
 MainCamera = UnityEngine.GameObject.Find("Main Camera");
 Display.GetComponent().texture = targetTexture;
 InitializeMediaPlayer();
 }, false);





private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
 {
 canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDist);
 sender.CopyFrameToVideoSurface(canvasBitmap);
 var sharedResourceDst = m_DstTexture.QueryInterface();
 var sharedTexDst = srcDevice.OpenSharedResource(sharedResourceDst.SharedHandle);
 using (var _stream = DataStream.Create(canvasBitmap.GetPixelBytes(), true, false))
 {
 _rect.DataPointer = _stream.DataPointer;
 _rect.Pitch = Texture2DDescription.Width * 4;
 var srcTexture = new SharpDX.Direct3D11.Texture2D(srcDevice, Texture2DDescription, _rect);
 srcContext.CopyResource(srcTexture, sharedTexDst);
 srcContext.Flush();
 sharedResourceDst.Dispose();
 sharedTexDst.Dispose();
 srcTexture.Dispose();
 }
 }