
Advanced search
Other articles (111)
-
The SPIPmotion queue
28 November 2010 — A queue stored in the database
When it is installed, SPIPmotion creates a new table in the database named spip_spipmotion_attentes.
This new table is made up of the following fields: id_spipmotion_attente, the unique numeric identifier of the task to process; id_document, the numeric identifier of the original document to encode; id_objet, the unique identifier of the object to which the encoded document should be attached automatically; objet, the type of object to which (...) -
Contribute to documentation
13 April 2011 — Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation by users as well as developers, including: critique of existing features and functions; articles contributed by developers, administrators, content producers and editors; screenshots to illustrate the above; translations of existing documentation into other languages.
To contribute, register to the project users’ mailing (...) -
Adding user-specific information and other author-related behaviour changes
12 April 2011 — The simplest way to add information to authors is to install the Inscription3 plugin. It also makes it possible to modify certain user-related behaviours (refer to its documentation for more information).
It is also possible to add fields to authors by installing the champs extras 2 and Interface pour champs extras plugins.
On other sites (7868)
-
How can I get the decoded frames, which I sent to the GPU, back from the GPU in RGB?
21 May 2024, by Владислав Сапожник
I use ffmpeg.av_hwframe_transfer_data on the decoded frames I sent to the GPU, but I cannot get them back in a usable format. I have tried changing my shaders and using av_hwframe_transfer_get_formats, but it is not working.
My code :


// Class declaration assumed (the MyWindow class below references Program.myBitmap).
public class Program
{
 private static bool _readingComplete = false;
 private static bool _decodingComplete = false;
 private static readonly object _lock = new object();
 private static Queue<AVPacket> packets = new Queue<AVPacket>();
 private static readonly object _fileLock = new object();
 public static MyWindow myWindow;
 public static SKBitmap myBitmap;

 public static async unsafe Task Main(string[] args)
 {
 FFmpegBinariesHelper.RegisterFFmpegBinaries();
 DynamicallyLoadedBindings.Initialize();
 Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

 Directory.Delete("frames", true);
 Directory.CreateDirectory("frames");

 var url = "rtsp://admin:123456@192.168.1.12:554/stream0?username=admin&password=E10ADC3949BA59ABBE56E057F20";

 AVDictionary* opts = null;
 ffmpeg.av_dict_set(&opts, "rtsp_transport", "tcp", 0); // the option name takes no leading dash

 var vsr = new VideoStreamReader(url, opts);
 var vsd = new VideoStreamDecoder(*vsr.GetCodecParameters(), AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA);

 Task readerTask = Task.Factory.StartNew(() => ReadPackets(vsr), TaskCreationOptions.LongRunning);
 Task decoderTask = Task.Factory.StartNew(() => DecodeFrames(vsd), TaskCreationOptions.LongRunning);

 var nativeWindowSettings = new NativeWindowSettings()
 {
 ClientSize = new Vector2i(800, 600),
 Title = "My first OpenTK program!"
 };

 using (var myWindow = new MyWindow(GameWindowSettings.Default, nativeWindowSettings))
 {
 myWindow.Run();
 }
 }

 private static unsafe void ReadPackets(VideoStreamReader vsr)
 {
 while (!_readingComplete)
 {
 vsr.TryReadNextPacket(out var packet);
 lock (_lock)
 {
 packets.Enqueue(packet);
 }
 }

 _readingComplete = true;
 }

 private static unsafe void DecodeFrames(VideoStreamDecoder vsd)
 {

 Console.WriteLine($"codec name: {vsd.CodecName}");

 //var sourceSize = vsd.FrameSize;
 //var sourcePixelFormat = vsd.PixelFormat;
 //var destinationSize = sourceSize;
 //var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_RGBA;
 //using var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat);

 var frameNumber = 0;

 while (true)
 {
 AVPacket packet;
 lock (_lock)
 {
 if (packets.Count == 0)
 {
 if (_readingComplete)
 {
 break;
 }
 else
 {
 continue;
 }
 }
 packet = packets.Dequeue();
 }

 vsd.TryDecodeNextFrame(out var frame, packet);
 //var convertedFrame = vfc.Convert(frame);

 //var bitmap = new SKBitmap(convertedFrame.width, convertedFrame.height, SKColorType.Bgra8888, SKAlphaType.Opaque);
 //bitmap.InstallPixels(new SKImageInfo(convertedFrame.width, convertedFrame.height, SKColorType.Bgra8888, SKAlphaType.Opaque), (IntPtr)convertedFrame.data[0]);
 //myBitmap = bitmap;
 var bitmap = new SKBitmap(frame.width, frame.height, SKColorType.Bgra8888, SKAlphaType.Opaque);
 bitmap.InstallPixels(new SKImageInfo(frame.width, frame.height, SKColorType.Bgra8888, SKAlphaType.Opaque), (IntPtr)frame.data[0]);
 myBitmap = bitmap;
 
 Console.WriteLine($"frame: {frameNumber}");
 frameNumber++;
 }

 _decodingComplete = true;
 }

 //private static unsafe void WriteFrame(AVFrame convertedFrame, int frameNumber)
 //{
 // var imageInfo = new SKImageInfo(convertedFrame.width, convertedFrame.height, SKColorType.Bgra8888, SKAlphaType.Opaque);
 // using var bitmap = new SKBitmap();
 // bitmap.InstallPixels(imageInfo, (IntPtr)convertedFrame.data[0]);

 // string filePath;
 // lock (_fileLock)
 // {
 // filePath = $"frames/frame.{frameNumber:D8}.jpg";
 // }

 // using var stream = File.Create(filePath);
 // bitmap.Encode(stream, SKEncodedImageFormat.Jpeg, 90);
 //}
}
using OpenTK.Graphics.OpenGL4;
using OpenTK.Mathematics;
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Desktop;
using OpenTK.Windowing.GraphicsLibraryFramework;
using SkiaSharp;

namespace OpenTKTask;

public class MyWindow : GameWindow
{
 private Shader shader;
 private int vertexBufferHandle;
 private int elementBufferHandle;
 private int vertexArrayHandle;
 private int texture;

 //float[] vertices =
 //{
 // 1.0f, 1.0f, 0.0f, 1.0f, 0.0f,
 // 1.0f, -1.0f, 0.0f, 1.0f, 1.0f,
 // -1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
 // -1.0f, 1.0f, 0.0f, 0.0f, 1.0f
 //};

 float[] vertices =
{
 //Position | Texture coordinates
 1.0f, 1.0f, 0.0f, 1.0f, 0.0f, // top right
 1.0f, -1.0f, 0.0f, 1.0f, 1.0f, // bottom right
 -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, // bottom left
 -1.0f, 1.0f, 0.0f, 0.0f, 0.0f // top left
};

 uint[] indices =
{
 0, 1, 3,
 1, 2, 3
 };

 float[] texCoords =
 {
 0.0f, 0.0f,
 1.0f, 0.0f,
 0.5f, 1.0f,
 };

 public MyWindow(GameWindowSettings gameWindowSettings, NativeWindowSettings nativeWindowSettings) : base(gameWindowSettings, nativeWindowSettings)
 {
 this.CenterWindow(new Vector2i(1280, 760));
 }

 protected override void OnResize(ResizeEventArgs e)
 {
 GL.Viewport(0, 0, e.Width, e.Height);
 base.OnResize(e);
 }

 protected override void OnLoad()
 {
 base.OnLoad();

 shader = new Shader("C:\\Users\\1\\Desktop\\7h3_C0d3r\\OpenTKTask\\vertexShader.vert", "C:\\Users\\1\\Desktop\\7h3_C0d3r\\OpenTKTask\\fragShader.frag");
 shader.Use();

 vertexArrayHandle = GL.GenVertexArray();
 GL.BindVertexArray(vertexArrayHandle);

 vertexBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
 GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.StaticDraw);

 elementBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ElementArrayBuffer, elementBufferHandle);
 GL.BufferData(BufferTarget.ElementArrayBuffer, indices.Length * sizeof(uint), indices, BufferUsageHint.StaticDraw);

 var positionLocation = shader.GetAttribLocation("aPosition");
 GL.VertexAttribPointer(positionLocation, 3, VertexAttribPointerType.Float, false, 5 * sizeof(float), 0);
 GL.EnableVertexAttribArray(positionLocation);

 var texCoordLocation = shader.GetAttribLocation("aTexCoord");
 GL.VertexAttribPointer(texCoordLocation, 2, VertexAttribPointerType.Float, false, 5 * sizeof(float), 3 * sizeof(float));
 GL.EnableVertexAttribArray(texCoordLocation);

 texture = GL.GenTexture();
 GL.BindTexture(TextureTarget.Texture2D, texture);

 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.Repeat);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.Repeat);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, 800, 600, 0, PixelFormat.Bgra, PixelType.UnsignedByte, IntPtr.Zero);
 GL.BindTexture(TextureTarget.Texture2D, 0);
 }

 protected override void OnUnload()
 {
 base.OnUnload();
 GL.DeleteBuffer(vertexBufferHandle);
 GL.DeleteVertexArray(vertexArrayHandle);
 GL.DeleteProgram(shader.Handle);
 GL.DeleteTexture(texture);
 }

 protected override void OnUpdateFrame(FrameEventArgs args)
 {
 base.OnUpdateFrame(args);
 }

 protected override void OnRenderFrame(FrameEventArgs args)
 {
 base.OnRenderFrame(args);

 UpdateTexture(Program.myBitmap);

 GL.Clear(ClearBufferMask.ColorBufferBit);

 GL.BindTexture(TextureTarget.Texture2D, texture);

 shader.Use();

 GL.BindVertexArray(vertexArrayHandle);

 GL.DrawElements(PrimitiveType.Triangles, indices.Length, DrawElementsType.UnsignedInt, 0);

 SwapBuffers();
 }

 public void UpdateTexture(SKBitmap bitmap)
 {
 GL.BindTexture(TextureTarget.Texture2D, texture);

 if (bitmap != null)
 {
 //byte[] pixels = bitmap.Bytes;
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, bitmap.Width, bitmap.Height, 0, PixelFormat.Bgra, PixelType.UnsignedByte, bitmap.GetPixels()); // pass the bitmap's pixel pointer, not 0
 }
 }

 public SKBitmap LoadBitmap(string path)
 {
 using (var stream = File.OpenRead(path))
 {
 return SKBitmap.Decode(stream);
 }
 }
}
using FFmpeg.AutoGen;
using System.Drawing;

namespace OpenTKTask;

public sealed unsafe class VideoStreamDecoder : IDisposable
{
 private readonly AVCodecContext* _pCodecContext;
 private readonly AVPacket* _pPacket;
 private readonly AVFrame* _pFrame;
 private readonly AVFrame* _receivedFrame;
 private AVFrame* _pSwFrame;
 private AVBufferRef* _pHWDeviceCtx;

 public VideoStreamDecoder(AVCodecParameters parameter, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_D3D12VA)
 {
 _receivedFrame = ffmpeg.av_frame_alloc();

 AVCodec* codec = ffmpeg.avcodec_find_decoder(parameter.codec_id);
 if (codec == null)
 throw new InvalidOperationException("Codec not found.");
 _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);

 ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();

 ffmpeg.avcodec_parameters_to_context(_pCodecContext, &parameter)
 .ThrowExceptionIfError();
 ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

 CodecName = ffmpeg.avcodec_get_name(codec->id);
 FrameSize = new Size(_pCodecContext->width, _pCodecContext->height);
 PixelFormat = _pCodecContext->pix_fmt;

 _pFrame = ffmpeg.av_frame_alloc();
 _pPacket = ffmpeg.av_packet_alloc();
 }

 public string CodecName { get; }
 public Size FrameSize { get; }
 public AVPixelFormat PixelFormat { get; }

 public bool TryDecodeNextFrame(out AVFrame frame, AVPacket packet)
 {
 ffmpeg.av_frame_unref(_pFrame);
 ffmpeg.av_frame_unref(_receivedFrame);
 int error;

 do
 {
 ffmpeg.avcodec_send_packet(_pCodecContext, &packet).ThrowExceptionIfError();
 error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
 } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

 error.ThrowExceptionIfError();

 if (_pCodecContext->hw_device_ctx != null)
 {
 ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
 Console.WriteLine((AVPixelFormat)171);
 frame = *_receivedFrame; // AV_PIX_FMT_NV11
 //Console.WriteLine((AVPixelFormat)frame.format);
 }
 else
 frame = *_pFrame; // AV_PIX_FMT_NV11
 //Console.WriteLine((AVPixelFormat)frame.format);
 return true;
 }

 public void Dispose()
 {
 var pFrame = _pFrame;
 ffmpeg.av_frame_free(&pFrame);

 var pCodecContext = _pCodecContext;
 ffmpeg.avcodec_free_context(&pCodecContext);

 if (_pHWDeviceCtx != null)
 {
 var pHWDeviceCtx = _pHWDeviceCtx;
 ffmpeg.av_buffer_unref(&pHWDeviceCtx);
 }

 if (_pSwFrame != null)
 {
 var pSwFrame = _pSwFrame;
 ffmpeg.av_frame_free(&pSwFrame);
 }
 }
}
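
A note on the step this code is missing: on the D3D11VA path the frame filled by av_hwframe_transfer_data is typically NV12, so pointing an SKBitmap declared as Bgra8888 at frame.data[0] cannot produce a correct image. Below is a minimal sketch of the conversion, assuming the same FFmpeg.AutoGen bindings used above; the helper name SwFrameConverter is illustrative and not part of the original post.

using FFmpeg.AutoGen;

// Illustrative helper: converts the software frame produced by
// av_hwframe_transfer_data (typically NV12 for D3D11VA) to packed BGRA.
public static unsafe class SwFrameConverter
{
 public static AVFrame* ToBgra(AVFrame* src)
 {
 // Scaler from the frame's actual pixel format to BGRA.
 SwsContext* sws = ffmpeg.sws_getContext(
 src->width, src->height, (AVPixelFormat)src->format,
 src->width, src->height, AVPixelFormat.AV_PIX_FMT_BGRA,
 ffmpeg.SWS_BILINEAR, null, null, null);

 // Destination frame with its own buffers.
 AVFrame* dst = ffmpeg.av_frame_alloc();
 dst->width = src->width;
 dst->height = src->height;
 dst->format = (int)AVPixelFormat.AV_PIX_FMT_BGRA;
 ffmpeg.av_frame_get_buffer(dst, 32);

 // After this call dst->data[0] / dst->linesize[0] hold packed BGRA rows.
 ffmpeg.sws_scale(sws, src->data, src->linesize, 0, src->height,
 dst->data, dst->linesize);

 ffmpeg.sws_freeContext(sws);
 return dst; // the caller frees it with av_frame_free(&dst) after uploading
 }
}

The returned frame's data[0] and linesize[0] can then be handed to SKBitmap.InstallPixels or to GL.TexImage2D with PixelFormat.Bgra, and the frame released with av_frame_free once it has been uploaded.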


-
Render YUV frame using OpenTK [closed]
20 May 2024, by dima2012 terminator
I am trying to render a YUV AVFrame that I get from a camera using OpenTK. I create a rectangle and try to apply a texture to it, but it does not work.


Here is my window class


using OpenTK.Graphics.Egl;
using OpenTK.Graphics.OpenGL4;
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Desktop;
using OpenTK.Windowing.GraphicsLibraryFramework;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace myFFmpeg
{
 public class CameraWindow : GameWindow
 {
 private int vertexBufferHandle;
 private int elementBufferHandle;
 private int vertexArrayHandle;
 private int frameNumber = 0;
 private int yTex, uTex, vTex;

 Shader shader;
 Texture texture;

 float[] vertices =
 {
 //Position | Texture coordinates
 0.5f, 0.5f, 0.0f, 1.0f, 0.0f, // top right
 0.5f, -0.5f, 0.0f, 1.0f, 1.0f, // bottom right
 -0.5f, -0.5f, 0.0f, 0.0f, 1.0f, // bottom left
 -0.5f, 0.5f, 0.0f, 0.0f, 0.0f // top left
 };


 private uint[] indices = 
 {
 0, 1, 3, // first triangle
 1, 2, 3 // second triangle
 };

 public CameraWindow(string title) : base(GameWindowSettings.Default, new NativeWindowSettings() { ClientSize = (1280, 720), Title = title }) { UpdateFrequency = 25; }

 protected override void OnUpdateFrame(FrameEventArgs e)
 {
 base.OnUpdateFrame(e);
 }

 protected override void OnLoad()
 {
 GL.ClearColor(0.5f, 0.3f, 0.3f, 1.0f);

 shader = new Shader(@"..\..\shader.vert", @"..\..\shader.frag");
 texture = new Texture();

 elementBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ElementArrayBuffer, elementBufferHandle);
 GL.BufferData(BufferTarget.ElementArrayBuffer, indices.Length * sizeof(uint), indices, BufferUsageHint.StaticDraw);

 vertexBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
 GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.StaticDraw);

 GL.BindBuffer(BufferTarget.ArrayBuffer, 0);

 vertexArrayHandle = GL.GenVertexArray();
 GL.BindVertexArray(vertexArrayHandle);

 GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
 GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, 5 * sizeof(float), 0);
 GL.EnableVertexAttribArray(0);

 int vertexShader = GL.CreateShader(ShaderType.VertexShader);
 GL.ShaderSource(vertexShader, System.IO.File.ReadAllText(@"..\..\shader.vert")); // ShaderSource takes the GLSL source text, not the file path
 GL.CompileShader(vertexShader);

 int fragmentShader = GL.CreateShader(ShaderType.FragmentShader);
 GL.ShaderSource(fragmentShader, System.IO.File.ReadAllText(@"..\..\shader.frag")); // likewise, read the file contents
 GL.CompileShader(fragmentShader);

 int shaderProgram = GL.CreateProgram();
 GL.AttachShader(shaderProgram, vertexShader);
 GL.AttachShader(shaderProgram, fragmentShader);
 GL.LinkProgram(shaderProgram);


 int vertexPosLocation = GL.GetAttribLocation(shaderProgram, "vertexPos");
 GL.EnableVertexAttribArray(vertexPosLocation);
 GL.VertexAttribPointer(vertexPosLocation, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 0);

 int texCoordLocation = GL.GetAttribLocation(shaderProgram, "texCoord");
 GL.EnableVertexAttribArray(texCoordLocation);
 GL.VertexAttribPointer(texCoordLocation, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 2 * sizeof(float));

 GL.UseProgram(shaderProgram);

 GL.ActiveTexture(TextureUnit.Texture0);
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "yTex"), 0);

 GL.ActiveTexture(TextureUnit.Texture1);
 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "uTex"), 1);

 GL.ActiveTexture(TextureUnit.Texture2);
 GL.BindTexture(TextureTarget.Texture2D, vTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "vTex"), 2);

 GL.BindVertexArray(0);
 //code

 base.OnLoad();
 }

 protected override void OnUnload()
 {
 GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
 GL.DeleteBuffer(vertexBufferHandle);
 GL.UseProgram(0);
 shader.Dispose();

 //code

 base.OnUnload();
 }

 protected override void OnRenderFrame(FrameEventArgs e)
 {

 GL.Clear(ClearBufferMask.ColorBufferBit);

 shader.Use();
 texture.Use(frameNumber++);

 GL.BindVertexArray(vertexArrayHandle);

 GL.DrawElements(PrimitiveType.Triangles, indices.Length, DrawElementsType.UnsignedInt, indices);

 Context.SwapBuffers();

 base.OnRenderFrame(e);
 }

 protected override void OnFramebufferResize(FramebufferResizeEventArgs e)
 {
 base.OnFramebufferResize(e);

 GL.Viewport(0, 0, e.Width, e.Height);
 }
 }
}



And my texture class :


using System;
using OpenTK;
using OpenTK.Graphics.OpenGL4;
using SkiaSharp;
using FFmpeg;
using SkiaSharp.Internals;
using StbImageSharp;
using FFmpeg.AutoGen;
using System.Threading;

namespace myFFmpeg
{
 public class Texture
 {
 int Handle, yTex, uTex, vTex;

 Program program = new Program();

 public Texture()
 {
 Handle = GL.GenTexture();
 }


 public unsafe void Use(int frameNumber)
 {
 GL.BindTexture(TextureTarget.Texture2D, Handle);

 // Generate textures only once (outside the loop)
 if (yTex == 0)
 {
 GL.GenTextures(1, out yTex);
 }
 if (uTex == 0)
 {
 GL.GenTextures(1, out uTex);
 }
 if (vTex == 0)
 {
 GL.GenTextures(1, out vTex);
 }

 // Bind textures to specific units before rendering each frame
 GL.ActiveTexture(TextureUnit.Texture0);
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.ActiveTexture(TextureUnit.Texture1);
 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.ActiveTexture(TextureUnit.Texture2);

 // Update textures with new frame data from FFmpeg
 AVFrame frame = program.getFrame();
 int width = frame.width;
 int height = frame.height;

 Console.BackgroundColor = ConsoleColor.White;
 Console.ForegroundColor = ConsoleColor.Black;
 Console.WriteLine((AVPixelFormat)frame.format);
 Console.BackgroundColor = ConsoleColor.Black;


 // Assuming YUV data is stored in separate planes (Y, U, V)
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width, height, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[0]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width / 2, height / 2, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[1]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.BindTexture(TextureTarget.Texture2D, vTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width / 2, height / 2, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[2]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 }
 }
}
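
One note on the uploads above: under a #version 330 core context the Luminance internal/pixel formats are legacy and may be rejected outright. A sketch of the same per-plane upload using single-channel R8 textures instead is shown below; UploadPlane is an illustrative helper name, not part of the code above.

using System;
using OpenTK.Graphics.OpenGL4;

static class PlaneUpload
{
 // Uploads one 8-bit plane (Y, U or V) into an existing texture object using the
 // core-profile friendly R8 / Red formats; the shader then samples the .r channel.
 // rowLength is the stride in pixels (equal to bytes for 8-bit planes), e.g. frame.linesize[0].
 public static void UploadPlane(int tex, int width, int height, IntPtr data, int rowLength)
 {
 GL.BindTexture(TextureTarget.Texture2D, tex);

 // FFmpeg rows are usually padded, so tell GL the real row length and byte alignment.
 GL.PixelStore(PixelStoreParameter.UnpackRowLength, rowLength);
 GL.PixelStore(PixelStoreParameter.UnpackAlignment, 1);

 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.R8,
 width, height, 0, PixelFormat.Red, PixelType.UnsignedByte, data);

 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.PixelStore(PixelStoreParameter.UnpackRowLength, 0); // restore the default
 }
}

It would be called as UploadPlane(yTex, width, height, (IntPtr)frame.data[0], frame.linesize[0]), and with width / 2, height / 2 and data[1] / data[2] for the chroma planes when the frame really is planar YUV420P; the fragment shader keeps reading each plane from the .r channel as before.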




And my shader class :


using OpenTK.Graphics.OpenGL4;
using System;
using System.IO;

namespace myFFmpeg
{
 public class Shader : IDisposable
 {
 public int Handle { get; private set; }

 public Shader(string vertexPath, string fragmentPath)
 {
 string vertexShaderSource = File.ReadAllText(vertexPath);
 string fragmentShaderSource = File.ReadAllText(fragmentPath);

 int vertexShader = GL.CreateShader(ShaderType.VertexShader);
 GL.ShaderSource(vertexShader, vertexShaderSource);
 GL.CompileShader(vertexShader);
 CheckShaderCompilation(vertexShader);

 int fragmentShader = GL.CreateShader(ShaderType.FragmentShader);
 GL.ShaderSource(fragmentShader, fragmentShaderSource);
 GL.CompileShader(fragmentShader);
 CheckShaderCompilation(fragmentShader);

 Handle = GL.CreateProgram();
 GL.AttachShader(Handle, vertexShader);
 GL.AttachShader(Handle, fragmentShader);
 GL.LinkProgram(Handle);
 CheckProgramLinking(Handle);

 GL.DetachShader(Handle, vertexShader);
 GL.DetachShader(Handle, fragmentShader);
 GL.DeleteShader(vertexShader);
 GL.DeleteShader(fragmentShader);
 }

 public void Use()
 {
 GL.UseProgram(Handle);
 }

 public int GetAttribLocation(string attribName)
 {
 return GL.GetAttribLocation(Handle, attribName);
 }

 public int GetUniformLocation(string uniformName)
 {
 return GL.GetUniformLocation(Handle, uniformName);
 }

 private void CheckShaderCompilation(int shader)
 {
 GL.GetShader(shader, ShaderParameter.CompileStatus, out int success);
 if (success == 0)
 {
 string infoLog = GL.GetShaderInfoLog(shader);
 throw new InvalidOperationException($"Shader compilation failed: {infoLog}");
 }
 }

 private void CheckProgramLinking(int program)
 {
 GL.GetProgram(program, GetProgramParameterName.LinkStatus, out int success);
 if (success == 0)
 {
 string infoLog = GL.GetProgramInfoLog(program);
 throw new InvalidOperationException($"Program linking failed: {infoLog}");
 }
 }

 public void Dispose()
 {
 GL.DeleteProgram(Handle);
 }
 }
}



Vert shader


#version 330 core
layout(location = 0) in vec3 vertexPos;
layout(location = 1) in vec2 texCoord;

out vec2 TexCoord; 

void main()
{
 gl_Position = vec4(vertexPos,1.0);
 TexCoord = texCoord;
}



Frag shader


#version 330 core
in vec2 TexCoord;
out vec4 color;

uniform sampler2D yTex;
uniform sampler2D uTex;
uniform sampler2D vTex;

void main()
{
 float y = texture(yTex, TexCoord).r;
 float u = texture(uTex, TexCoord).r - 0.5;
 float v = texture(vTex, TexCoord).r - 0.5;

 // YUV to RGB conversion (BT.709)
 float r = y + 1.5714 * v;
 float g = y - 0.6486 * u - 0.3918 * v;
 float b = y + 1.8556 * u;

 color = vec4(r, g, b, 1.0);
}



I can provide more code, if needed..


I tried changing shaders, changing textures, and getting the frame using
ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0);
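
For what it is worth, a frame transferred from a D3D11VA decoder with av_hwframe_transfer_data usually arrives as NV12: data[0] holds the Y plane and data[1] holds interleaved UV, so there is no separate V plane for a three-sampler shader. A small sketch (assuming the same FFmpeg.AutoGen bindings; the branch bodies are only comments) of checking the actual format before choosing an upload path:

var fmt = (AVPixelFormat)frame.format;

if (fmt == AVPixelFormat.AV_PIX_FMT_NV12)
{
 // data[0] = Y (width x height), data[1] = interleaved UV (width x height / 2);
 // there is no data[2], so either upload data[1] as a two-channel texture and
 // sample .rg, or convert with sws_scale to YUV420P/BGRA before uploading.
}
else if (fmt == AVPixelFormat.AV_PIX_FMT_YUV420P)
{
 // Three separate planes: data[0] = Y, data[1] = U, data[2] = V,
 // which is what a three-sampler YUV shader expects.
}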


-
Display real time frames from several RTSP streams
13 February 2024, by Mrax
I have this class; it uses the ffmpeg library for RTSP live streaming:


#include <iostream>
#include <string>
#include <vector>
#include <mutex>

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}

class ryMediaSource
{
public:
 ryMediaSource() {}
 ryMediaSource(const ryMediaSource& other);
 ~ryMediaSource();

 bool ryOpenMediaSource(const std::string&);

private:
 mediaSource pMediaSource;
 AVFormatContext* pFormatCtx;
 mutable std::mutex pMutex;
};


And inside my main file, I have this vector of ryMediaSource and four RTSP URLs:


std::vector<ryMediaSource> mediaSources;
std::vector<std::string> streams =
{
 {"rtsp://1"},
 {"rtsp://2"},
 {"rtsp://3"},
 {"rtsp://4"},
};


Creating an instance for every stream:


for (const auto& stream : streams)
{
 mediaSources.emplace_back(); // Create a new instance for each stream
}



And opening all the streams (I need to have access to all the streams, all the time).


for (size_t s = 0; s < streams.size(); s++)
{
 mediaSources[s].ryOpenMediaSource(streams[s]);
}



After all the streams are loaded, I start to display the video from all of the streams: av_read_frame(pFormatCtx, pPacket).
But there is a gap between what is being displayed and what is actually being captured from the source (IP cameras).
For ryOpenMediaSource(streams[0]) it is about 11 seconds, for ryOpenMediaSource(streams[1]) about 7 seconds, for ryOpenMediaSource(streams[2]) about 4 seconds, and ryOpenMediaSource(streams[3]) is real time.
I realized that the issue is in my ryOpenMediaSource code:


bool ryMediaSource::ryOpenMediaSource(const std::string& url)
{
 int rc = -1;

 pFormatCtx = avformat_alloc_context();
 if (!pFormatCtx)
 throw std::runtime_error("Failed to allocate AVFormatContext.");
 rc = avformat_open_input(&pFormatCtx, url.c_str(), NULL, NULL);
 if (rc < 0)
 {
 return false;
 }
 return true; // the success path must return a value
}



My question is: why is this happening? Why can't all the streams have the same (timestamp?) as the last one inserted in my vector of ryMediaSource?


Should I overwrite some variable of pFormatCtx to "force" the whole vector to have the same (timestamp?) as the last one? If so, can you give me some guidance?


I tried setting some different values on pFormatCtx after it was loaded with avformat_open_input(&pFormatCtx, url.c_str(), NULL, &pDicts); but no luck at all.


I am expecting all streams to start at the same time, even if they are preloaded, so that later on I can transform these frames into a cv::Mat for rendering.


MRE :


Header :

#pragma once

#include <iostream>
#include <string>
#include <vector>
#include <chrono>
#include <thread>
#include <mutex>


extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
#include <libavutil/hwcontext.h>
#include <libavutil/opt.h>
#include <libavutil/avassert.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avio.h>
#include <libavutil/time.h>
}

class ryMediaSource
{
public:
 ryMediaSource() {}
 ryMediaSource(const ryMediaSource& other);
 ~ryMediaSource();

 struct mediaSourceParams
 {
 int sx;
 int sy;
 int lsize;
 double fps;
 unsigned char* frame;
 };

 bool ryOpenMediaSource(const std::string&);
 mediaSourceParams ryGetMediaSourceFrame();
 void ryCloseMediaSource();

private:
 mediaSource pMediaSource;
 AVFormatContext* pFormatCtx;
 AVCodecContext* pCodecCtx;
 AVFrame* pFrame;
 SwsContext* pSwsCtx;
 AVPacket* pPacket;
 int pVideoStream;
 uint8_t* pBuffer;
 AVFrame* pPict;
 double pFPS;
 mutable std::mutex pMutex;
};

C++ source code :

#include "ryMediaSource.hpp"

ryMediaSource::ryMediaSource(const ryMediaSource& other)
:pFormatCtx(nullptr), 
pCodecCtx(nullptr), 
pFrame(nullptr), 
pSwsCtx(nullptr), 
pPacket(nullptr), 
pBuffer(nullptr), 
pPict(nullptr)
{
 std::lock_guard lock(other.pMutex);
 av_log_set_level(0);
 avformat_network_init();
}

bool ryMediaSource::ryOpenMediaSource(const std::string& url)
{
 int rc = -1;

 try
 {
 AVDictionary* pDicts = nullptr;

 pFormatCtx = avformat_alloc_context();
 if (!pFormatCtx)
 throw std::runtime_error("Failed to allocate AVFormatContext.");
 rc = av_dict_set(&pDicts, "rtsp_transport", "tcp", 0);
 if (rc < 0)
 throw std::runtime_error("av_dict_set failed.");
 rc = avformat_open_input(&pFormatCtx, url.c_str(), NULL, &pDicts);
 if (rc < 0)
 {
 av_dict_free(&pDicts); // Free the dictionary in case of an error
 throw std::runtime_error("Could not open source.");
 }
 }
 catch (const std::exception& e)
 {
 std::cerr << "Exception: " << e.what() << std::endl;
 return false;
 }

 try
 {
 rc = avformat_find_stream_info(pFormatCtx, NULL);
 if (rc < 0)
 {
 throw std::runtime_error("Could not find stream information.");
 }
 pVideoStream = -1;
 for (size_t v = 0; v < pFormatCtx->nb_streams; ++v)
 {
 if (pFormatCtx->streams[v]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
 {
 pVideoStream = static_cast<int>(v);
 AVRational rational = pFormatCtx->streams[pVideoStream]->avg_frame_rate;
 pFPS = 1.0 / ((double)rational.num / (double)(rational.den));
 break;
 }
 }
 if (pVideoStream < 0)
 {
 throw std::runtime_error("Could not find video stream.");
 }

 const AVCodec* pCodec = avcodec_find_decoder(pFormatCtx->streams[pVideoStream]->codecpar->codec_id);
 if (!pCodec)
 {
 throw std::runtime_error("Unsupported codec!");
 }
 pCodecCtx = avcodec_alloc_context3(pCodec);
 if (!pCodecCtx)
 {
 throw std::runtime_error("Failed to allocate AVCodecContext.");
 }
 rc = avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[pVideoStream]->codecpar);
 if (rc != 0)
 {
 throw std::runtime_error("Could not copy codec context.");
 }
 rc = avcodec_open2(pCodecCtx, pCodec, NULL);
 if (rc < 0)
 {
 throw std::runtime_error("Could not open codec.");
 }
 pFrame = av_frame_alloc();
 if (!pFrame)
 {
 throw std::runtime_error("Could not allocate frame.");
 }
 pSwsCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24, SWS_BILINEAR, NULL, NULL, NULL);
 if (!pSwsCtx)
 {
 throw std::runtime_error("Failed to allocate SwsContext.");
 }
 pPacket = av_packet_alloc();
 if (!pPacket)
 {
 throw std::runtime_error("Could not allocate AVPacket.");
 }
 pBuffer = (uint8_t*)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1));
 if (!pBuffer)
 {
 throw std::runtime_error("Could not allocate buffer.");
 }
 pPict = av_frame_alloc();
 if (!pPict)
 {
 throw std::runtime_error("Could not allocate frame.");
 }
 av_image_fill_arrays(pPict->data, pPict->linesize, pBuffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
 }
 catch (const std::exception& e)
 {
 std::cerr << "Exception: " << e.what() << std::endl;
 return false;
 }

 return true;
}

ryMediaSource::mediaSourceParams ryMediaSource::ryGetMediaSourceFrame()
{
 mediaSourceParams msp = { 0, 0, 0, 0.0, nullptr };
 char errbuf[AV_ERROR_MAX_STRING_SIZE];

 std::lock_guard lock(pMutex);
 if (av_read_frame(pFormatCtx, pPacket) >= 0)
 {
 if (pPacket->stream_index == pVideoStream)
 {
 int ret = avcodec_send_packet(pCodecCtx, pPacket);
 if (ret < 0)
 {
 av_strerror(ret, errbuf, sizeof(errbuf));
 std::cerr << "Error sending packet for avcodec_send_packet: " << errbuf << std::endl;

 std::cerr << "avcodec_flush_buffers " << errbuf << std::endl;
 avcodec_flush_buffers(pCodecCtx);
 // Handle specific error cases
 if (ret == AVERROR(EAGAIN))
 {
 std::cerr << "EAGAIN indicates that more input is required" << std::endl;
 }
 else if (ret == AVERROR_EOF)
 {
 std::cerr << "AVERROR_EOF indicates that the encoder has been fully flushed" << std::endl;
 }
 else
 {
 //std::cerr << "avcodec_flush_buffers " << errbuf << std::endl;
 // For other errors, you may choose to flush the codec context and continue decoding.
 //avcodec_flush_buffers(pCodecCtx);
 }
 }
 ret = avcodec_receive_frame(pCodecCtx, pFrame);
 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
 {
 av_strerror(ret, errbuf, sizeof(errbuf));

 std::cerr << "Error receiving packet for avcodec_receive_frame: " << errbuf << std::endl;


 // EAGAIN indicates that more frames are needed or EOF is reached.
 // You may choose to break out of the loop or handle it based on your application's logic.

 return msp;
 }
 else if (ret < 0)
 {
 av_strerror(ret, errbuf, sizeof(errbuf));
 std::cerr << "Error receiving frame for avcodec_receive_frame: " << errbuf << std::endl;
 // Optionally, handle specific error cases
 if (ret == AVERROR(EINVAL))
 {
 std::cerr << "EINVAL indicates that more input is required" << std::endl;

 //break;
 }
 else
 {
 std::cerr << "For other errors" << std::endl;

 //break;
 }
 }
 // Move memory allocation outside the loop if frame size is constant
 size_t bufferSize = static_cast<size_t>(pPict->linesize[0]) * pCodecCtx->height;
 msp.frame = new unsigned char[bufferSize];
 msp.lsize = pPict->linesize[0];
 msp.sx = pCodecCtx->width;
 msp.sy = pCodecCtx->height;
 msp.fps = pFPS;
 sws_scale(pSwsCtx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pPict->data, pPict->linesize);
 std::memcpy(msp.frame, pBuffer, bufferSize);
 //delete[] msp.frame;
 }

 // Unref packet for non-video streams
 av_packet_unref(pPacket);
 }

 return msp;
}

main.cpp

std::vector<std::string> streams =
{
 {"rtsp://1"},
 {"rtsp://2"},
 {"rtsp://3"},
 {"rtsp://4"},
};

std::vector<ryMediaSource> mediaSources;

void main()
{
 int key = 0;
 int channel = 0;
 std::vector<cv::Mat> streamFrame(streams.size());
 ryMediaSource::mediaSourceParams msp = { 0, 0, 0, 0.0, nullptr };

 for (const auto& stream : streams)
 {
 mediaSources.emplace_back(); // Create a new instance for each stream
 }
 for (size_t s = 0; s < streams.size(); s++)
 {
 try
 {
 mediaSources[s].ryOpenMediaSource(streams[s]);
 }
 catch (const std::exception& e)
 {
 std::cerr << "Error initializing stream " << s << ": " << e.what() << std::endl;
 }
 }

 cv::namedWindow("ryInferenceServer", cv::WINDOW_FREERATIO);
 cv::resizeWindow("ryInferenceServer", 640, 480);
 cv::moveWindow("ryInferenceServer", 0, 0);
 for (;;)
 {
 for (size_t st = 0; st < mediaSources.size(); ++st)
 {
 msp = mediaSources[st].ryGetMediaSourceFrame();
 if (msp.frame != nullptr)
 {
 cv::Mat preview;
 cv::Mat frame(msp.sy, msp.sx, CV_8UC3, msp.frame, msp.lsize);
 cv::resize(frame, preview, cv::Size(640, 480));
 if (!frame.empty())
 {
 try
 {
 streamFrame[st] = frame.clone();
 if (channel == st)
 {
 cv::imshow("ryInferenceServer", preview);
 key = cv::waitKeyEx(1);
 if (key == LEFT_KEY)
 {
 channel--;
 if (channel < 0)
 channel = 0;
 }
 if (key == RIGHT_KEY)
 {
 channel++;
 if (channel >= mediaSources.size())
 channel = mediaSources.size() - 1;
 }
 if (key == 27)
 break;
 }
 streamFrame[st].release();
 delete[] msp.frame;
 }
 catch (const std::exception& e)
 {
 std::cerr << "Exception in processing frame for stream " << st << ": " << e.what() << std::endl;
 }
 }
 frame.release();
 }
 }
 }
}