
Recherche avancée
Autres articles (37)
-
Contribute to documentation
13 avril 2011Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation by users as well as developers - including : critique of existing features and functions articles contributed by developers, administrators, content producers and editors screenshots to illustrate the above translations of existing documentation into other languages
To contribute, register to the project users’ mailing (...) -
Selection of projects using MediaSPIP
2 mai 2011, parThe examples below are representative elements of MediaSPIP specific uses for specific projects.
MediaSPIP farm @ Infini
The non profit organizationInfini develops hospitality activities, internet access point, training, realizing innovative projects in the field of information and communication technologies and Communication, and hosting of websites. It plays a unique and prominent role in the Brest (France) area, at the national level, among the half-dozen such association. Its members (...) -
Use, discuss, criticize
13 avril 2011, parTalk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
A discussion list is available for all exchanges between users.
Sur d’autres sites (8354)
-
Render YUV frame using OpenTK [closed]
20 mai 2024, par dima2012 terminatormy window
Im trying to render YUV AVFrame, that i get from camera using OpenTK, im creating a rectangle and trying to apply a texture to it, but it doesnt work.


Here is my window class


using OpenTK.Graphics.Egl;
using OpenTK.Graphics.OpenGL4;
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Desktop;
using OpenTK.Windowing.GraphicsLibraryFramework;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace myFFmpeg
{
 public class CameraWindow : GameWindow
 {
 private int vertexBufferHandle;
 private int elementBufferHandle;
 private int vertexArrayHandle;
 private int frameNumber = 0;
 private int yTex, uTex, vTex;

 Shader shader;
 Texture texture;

 float[] vertices =
 {
 //Position | Texture coordinates
 0.5f, 0.5f, 0.0f, 1.0f, 0.0f, // top right
 0.5f, -0.5f, 0.0f, 1.0f, 1.0f, // bottom right
 -0.5f, -0.5f, 0.0f, 0.0f, 1.0f, // bottom left
 -0.5f, 0.5f, 0.0f, 0.0f, 0.0f // top left
 };


 private uint[] indices = 
 {
 0, 1, 3, // first triangle
 1, 2, 3 // second triangle
 };

 public CameraWindow(string title) : base(GameWindowSettings.Default, new NativeWindowSettings() { ClientSize = (1280, 720), Title = title }) { UpdateFrequency = 25; }

 protected override void OnUpdateFrame(FrameEventArgs e)
 {
 base.OnUpdateFrame(e);
 }

 protected override void OnLoad()
 {
 GL.ClearColor(0.5f, 0.3f, 0.3f, 1.0f);

 shader = new Shader(@"..\..\shader.vert", @"..\..\shader.frag");
 texture = new Texture();

 elementBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ElementArrayBuffer, elementBufferHandle);
 GL.BufferData(BufferTarget.ElementArrayBuffer, indices.Length * sizeof(uint), indices, BufferUsageHint.StaticDraw);

 vertexBufferHandle = GL.GenBuffer();
 GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
 GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.StaticDraw);

 GL.BindBuffer(BufferTarget.ArrayBuffer, 0);

 vertexArrayHandle = GL.GenVertexArray();
 GL.BindVertexArray(vertexArrayHandle);

 GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferHandle);
 GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, 5 * sizeof(float), 0);
 GL.EnableVertexAttribArray(0);

 int vertexShader = GL.CreateShader(ShaderType.VertexShader);
 GL.ShaderSource(vertexShader, @"..\..\shader.vert");
 GL.CompileShader(vertexShader);

 int fragmentShader = GL.CreateShader(ShaderType.FragmentShader);
 GL.ShaderSource(fragmentShader, @"..\..\shader.frag");
 GL.CompileShader(fragmentShader);

 int shaderProgram = GL.CreateProgram();
 GL.AttachShader(shaderProgram, vertexShader);
 GL.AttachShader(shaderProgram, fragmentShader);
 GL.LinkProgram(shaderProgram);


 int vertexPosLocation = GL.GetAttribLocation(shaderProgram, "vertexPos");
 GL.EnableVertexAttribArray(vertexPosLocation);
 GL.VertexAttribPointer(vertexPosLocation, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 0);

 int texCoordLocation = GL.GetAttribLocation(shaderProgram, "texCoord");
 GL.EnableVertexAttribArray(texCoordLocation);
 GL.VertexAttribPointer(texCoordLocation, 2, VertexAttribPointerType.Float, false, 4 * sizeof(float), 2 * sizeof(float));

 GL.UseProgram(shaderProgram);

 GL.ActiveTexture(TextureUnit.Texture0);
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "yTex"), 0);

 GL.ActiveTexture(TextureUnit.Texture1);
 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "uTex"), 1);

 GL.ActiveTexture(TextureUnit.Texture2);
 GL.BindTexture(TextureTarget.Texture2D, vTex);
 GL.Uniform1(GL.GetUniformLocation(shaderProgram, "vTex"), 2);

 GL.BindVertexArray(0);
 //code

 base.OnLoad();
 }

 protected override void OnUnload()
 {
 GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
 GL.DeleteBuffer(vertexBufferHandle);
 GL.UseProgram(0);
 shader.Dispose();

 //code

 base.OnUnload();
 }

 protected override void OnRenderFrame(FrameEventArgs e)
 {

 GL.Clear(ClearBufferMask.ColorBufferBit);

 shader.Use();
 texture.Use(frameNumber++);

 GL.BindVertexArray(vertexArrayHandle);

 GL.DrawElements(PrimitiveType.Triangles, indices.Length, DrawElementsType.UnsignedInt, indices);

 Context.SwapBuffers();

 base.OnRenderFrame(e);
 }

 protected override void OnFramebufferResize(FramebufferResizeEventArgs e)
 {
 base.OnFramebufferResize(e);

 GL.Viewport(0, 0, e.Width, e.Height);
 }
 }
}



And my texture class :


using System;
using OpenTK;
using OpenTK.Graphics.OpenGL4;
using SkiaSharp;
using FFmpeg;
using SkiaSharp.Internals;
using StbImageSharp;
using FFmpeg.AutoGen;
using System.Threading;

namespace myFFmpeg
{
 public class Texture
 {
 int Handle, yTex, uTex, vTex;

 Program program = new Program();

 public Texture()
 {
 Handle = GL.GenTexture();
 }


 public unsafe void Use(int frameNumber)
 {
 GL.BindTexture(TextureTarget.Texture2D, Handle);

 // Generate textures only once (outside the loop)
 if (yTex == 0)
 {
 GL.GenTextures(1, out yTex);
 }
 if (uTex == 0)
 {
 GL.GenTextures(1, out uTex);
 }
 if (vTex == 0)
 {
 GL.GenTextures(1, out vTex);
 }

 // Bind textures to specific units before rendering each frame
 GL.ActiveTexture(TextureUnit.Texture0);
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.ActiveTexture(TextureUnit.Texture1);
 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.ActiveTexture(TextureUnit.Texture2);

 // Update textures with new frame data from FFmpeg
 AVFrame frame = program.getFrame();
 int width = frame.width;
 int height = frame.height;

 Console.BackgroundColor = ConsoleColor.White;
 Console.ForegroundColor = ConsoleColor.Black;
 Console.WriteLine((AVPixelFormat)frame.format);
 Console.BackgroundColor = ConsoleColor.Black;


 // Assuming YUV data is stored in separate planes (Y, U, V)
 GL.BindTexture(TextureTarget.Texture2D, yTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width, height, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[0]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.BindTexture(TextureTarget.Texture2D, uTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width / 2, height / 2, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[1]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);

 GL.BindTexture(TextureTarget.Texture2D, vTex);
 GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Luminance, width / 2, height / 2, 0, PixelFormat.Luminance, PixelType.UnsignedByte, (IntPtr)frame.data[2]);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
 GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);

 }
 }
}




And my shader class :


using OpenTK.Graphics.OpenGL4;
using System;
using System.IO;

namespace myFFmpeg
{
 public class Shader : IDisposable
 {
 public int Handle { get; private set; }

 public Shader(string vertexPath, string fragmentPath)
 {
 string vertexShaderSource = File.ReadAllText(vertexPath);
 string fragmentShaderSource = File.ReadAllText(fragmentPath);

 int vertexShader = GL.CreateShader(ShaderType.VertexShader);
 GL.ShaderSource(vertexShader, vertexShaderSource);
 GL.CompileShader(vertexShader);
 CheckShaderCompilation(vertexShader);

 int fragmentShader = GL.CreateShader(ShaderType.FragmentShader);
 GL.ShaderSource(fragmentShader, fragmentShaderSource);
 GL.CompileShader(fragmentShader);
 CheckShaderCompilation(fragmentShader);

 Handle = GL.CreateProgram();
 GL.AttachShader(Handle, vertexShader);
 GL.AttachShader(Handle, fragmentShader);
 GL.LinkProgram(Handle);
 CheckProgramLinking(Handle);

 GL.DetachShader(Handle, vertexShader);
 GL.DetachShader(Handle, fragmentShader);
 GL.DeleteShader(vertexShader);
 GL.DeleteShader(fragmentShader);
 }

 public void Use()
 {
 GL.UseProgram(Handle);
 }

 public int GetAttribLocation(string attribName)
 {
 return GL.GetAttribLocation(Handle, attribName);
 }

 public int GetUniformLocation(string uniformName)
 {
 return GL.GetUniformLocation(Handle, uniformName);
 }

 private void CheckShaderCompilation(int shader)
 {
 GL.GetShader(shader, ShaderParameter.CompileStatus, out int success);
 if (success == 0)
 {
 string infoLog = GL.GetShaderInfoLog(shader);
 throw new InvalidOperationException($"Shader compilation failed: {infoLog}");
 }
 }

 private void CheckProgramLinking(int program)
 {
 GL.GetProgram(program, GetProgramParameterName.LinkStatus, out int success);
 if (success == 0)
 {
 string infoLog = GL.GetProgramInfoLog(program);
 throw new InvalidOperationException($"Program linking failed: {infoLog}");
 }
 }

 public void Dispose()
 {
 GL.DeleteProgram(Handle);
 }
 }
}



Vert shader


#version 330 core
layout(location = 0) in vec3 vertexPos;
layout(location = 1) in vec2 texCoord;

out vec2 TexCoord; 

void main()
{
 gl_Position = vec4(vertexPos,1.0);
 TexCoord = texCoord;
}



Frag shader


#version 330 core
in vec2 TexCoord;
out vec4 color;

uniform sampler2D yTex;
uniform sampler2D uTex;
uniform sampler2D vTex;

void main()
{
 float y = texture(yTex, TexCoord).r;
 float u = texture(uTex, TexCoord).r - 0.5;
 float v = texture(vTex, TexCoord).r - 0.5;

 // YUV to RGB conversion (BT.709)
 float r = y + 1.5714 * v;
 float g = y - 0.6486 * u - 0.3918 * v;
 float b = y + 1.8556 * u;

 color = vec4(r, g, b, 1.0);
}



I can provide more code, if needed..


I tried changing shaders, changing textures, getting frame using
ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0);


-
Error : Output format mp4 is not available
12 avril 2024, par alpaca pwaaI'm using fluent-ffmpeg in my nextjs application, I'm trying to process the video and specified a format to stream on my s3 bucket but it keeps on failing. My terminal keeps on throwing "Error : Error : Output format mp4 is not available". I already verify my ffmpeg format "ffmpeg -format" and confirm that it supports encoding and decoding mp4 files. I've already tried solutions from other forums but it's not working for me.


createVideo: privateProcedure
 .input(
 z.object({
 fileId: z.string(),
 })
 )
 .mutation(async ({ ctx, input }) => {
 const { getUser } = getKindeServerSession();
 const user = await getUser();

 if (!user || !user.id || !user.email) {
 throw new TRPCError({ code: "UNAUTHORIZED" });
 }

 const dbUser = await db.user.findFirst({
 where: {
 id: user.id,
 },
 });

 if (!dbUser) {
 throw new TRPCError({
 code: "UNAUTHORIZED",
 message: "User not found in the database.",
 });
 }

 const putObjectCommand = new PutObjectCommand({
 Bucket: process.env.AWS_BUCKET_NAME!,
 Key: generateFileName(),
 });

 const s3 = new S3Client({
 region: process.env.AWS_BUCKET_REGION!,
 credentials: {
 accessKeyId: process.env.AWS_ACCESS_KEY!,
 secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
 },
 });

 const singedURL = await getSignedUrl(s3, putObjectCommand, {
 expiresIn: 60,
 });

 const ffmpeg = require("fluent-ffmpeg");
 const passthroughStream = new PassThrough();

 ffmpeg({ source: "./template1.mp4" })
 .on("end", async () => {
 console.log("Job done");
 await uploadToS3(passthroughStream);
 })
 .on("error", (error: string) => {
 console.error("Error:", error);
 throw new Error("Error processing video");
 })
 .videoFilter({
 filter: "drawtext",
 options: {
 text: "hi",
 fontsize: 24,
 fontcolor: "white",
 x: "(w-text_w)/2",
 y: "(h-text_h)/2",
 box: 1,
 boxcolor: "black@0.5",
 boxborderw: 5,
 fontfile: "/Windows/fonts/calibri.ttf",
 },
 })
 .videoCodec("libx264")
 .outputFormat("mp4")
 .outputOptions(["-movflags frag_keyframe+empty_moov"])
 .pipe(passthroughStream, { end: true });

 const uploadToS3 = async (stream: PassThrough) => {
 const upload = new Upload({
 client: s3,
 params: {
 Bucket: process.env.AWS_BUCKET_NAME!,
 Key: generateFileName(),
 Body: stream,
 },
 });
 await upload.done();
 };

 await new Promise((resolve, reject) => {
 passthroughStream.on("end", resolve);
 passthroughStream.on("error", reject);
 });

 const createdVideo = await db.video.create({
 data: {
 name: "Test Name",
 url: singedURL.split("?")[0],
 key: singedURL,
 fileId: input.fileId,
 },
 });

 return createdVideo;
 }),



Here's the ffmpeg log.


ffmpeg started on 2024-04-11 at 20:58:56
Report written to "ffmpeg-20240411-205856.log"
Log level: 48
Command line:
"C:\\ProgramData\\chocolatey\\lib\\ffmpeg-full\\tools\\ffmpeg\\bin\\ffmpeg.exe" -i ./template1.mp4 -filter:v "drawtext=text=hi:fontsize=24:fontcolor=white:x=(w-text_w)/2:y=(h-text_h)/2:box=1:boxcolor=black@0.5:boxborderw=5:fontfile=/Windows/fonts/calibri.ttf" -report pipe:1
ffmpeg version 7.0-full_build-www.gyan.dev Copyright (c) 2000-2024 the FFmpeg developers
 built with gcc 13.2.0 (Rev5, Built by MSYS2 project)
 configuration: --enable-gpl --enable-version3 --enable-static --disable-w32threads --disable-autodetect --enable-fontconfig --enable-iconv --enable-gnutls --enable-libxml2 --enable-gmp --enable-bzlib --enable-lzma --enable-libsnappy --enable-zlib --enable-librist --enable-libsrt --enable-libssh --enable-libzmq --enable-avisynth --enable-libbluray --enable-libcaca --enable-sdl2 --enable-libaribb24 --enable-libaribcaption --enable-libdav1d --enable-libdavs2 --enable-libuavs3d --enable-libxevd --enable-libzvbi --enable-librav1e --enable-libsvtav1 --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs2 --enable-libxeve --enable-libxvid --enable-libaom --enable-libjxl --enable-libopenjpeg --enable-libvpx --enable-mediafoundation --enable-libass --enable-frei0r --enable-libfreetype --enable-libfribidi --enable-libharfbuzz --enable-liblensfun --enable-libvidstab --enable-libvmaf --enable-libzimg --enable-amf --enable-cuda-llvm --enable-cuvid --enable-dxva2 --enable-d3d11va --enable-d3d12va --enable-f libavutil 59. 8.100 / 59. 8.100
 libavcodec 61. 3.100 / 61. 3.100
 libavformat 61. 1.100 / 61. 1.100
 libavdevice 61. 1.100 / 61. 1.100
 libavfilter 10. 1.100 / 10. 1.100
 libswscale 8. 1.100 / 8. 1.100
 libswresample 5. 1.100 / 5. 1.100
 libpostproc 58. 1.100 / 58. 1.100
Splitting the commandline.
Reading option '-i' ... matched as input url with argument './template1.mp4'.
Reading option '-filter:v' ... matched as option 'filter' (apply specified filters to audio/video) with argument 'drawtext=text=hi:fontsize=24:fontcolor=white:x=(w-text_w)/2:y=(h-text_h)/2:box=1:boxcolor=black@0.5:boxborderw=5:fontfile=/Windows/fonts/calibri.ttf'.
Reading option '-report' ... matched as option 'report' (generate a report) with argument '1'.
Reading option 'pipe:1' ... matched as output url.
Finished splitting the commandline.
Parsing a group of options: global .
Applying option report (generate a report) with argument 1.
Successfully parsed a group of options.
Parsing a group of options: input url ./template1.mp4.
Successfully parsed a group of options.
Opening an input file: ./template1.mp4.
[AVFormatContext @ 00000262cd0888c0] Opening './template1.mp4' for reading
[file @ 00000262cd0a94c0] Setting default whitelist 'file,crypto,data'
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Format mov,mp4,m4a,3gp,3g2,mj2 probed with size=2048 and score=100
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] ISO: File Type Major Brand: isom
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Unknown dref type 0x206c7275 size 12
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Processing st: 0, edit list 0 - media time: 1024, duration: 126981
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Offset DTS by 1024 to make first pts zero.
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Setting codecpar->delay to 2 for stream st: 0
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] Before avformat_find_stream_info() pos: 6965 bytes read:32768 seeks:0 nb_streams:1
[h264 @ 00000262cd0bb140] nal_unit_type: 7(SPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] Decoding VUI
[h264 @ 00000262cd0bb140] nal_unit_type: 8(PPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] nal_unit_type: 7(SPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] Decoding VUI
[h264 @ 00000262cd0bb140] nal_unit_type: 8(PPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] Decoding VUI
[h264 @ 00000262cd0bb140] nal_unit_type: 6(SEI), nal_ref_idc: 0
[h264 @ 00000262cd0bb140] nal_unit_type: 7(SPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] nal_unit_type: 8(PPS), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] nal_unit_type: 5(IDR), nal_ref_idc: 3
[h264 @ 00000262cd0bb140] Decoding VUI
[h264 @ 00000262cd0bb140] Format yuv420p chosen by get_format().
[h264 @ 00000262cd0bb140] Reinit context to 1088x1920, pix_fmt: yuv420p
[h264 @ 00000262cd0bb140] no picture 
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] All info found
[mov,mp4,m4a,3gp,3g2,mj2 @ 00000262cd0888c0] After avformat_find_stream_info() pos: 82242 bytes read:82242 seeks:0 frames:1
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from './template1.mp4':
 Metadata:
 major_brand : isom
 minor_version : 512
 compatible_brands: isomiso2avc1mp41
 encoder : Lavf58.76.100
 Duration: 00:00:08.27, start: 0.000000, bitrate: 3720 kb/s
 Stream #0:0[0x1](und), 1, 1/15360: Video: h264 (High) (avc1 / 0x31637661), yuv420p(tv, bt709, progressive), 1080x1920, 3714 kb/s, 30 fps, 30 tbr, 15360 tbn (default)
 Metadata:
 handler_name : VideoHandler
 vendor_id : [0][0][0][0]
Successfully opened the file.
Parsing a group of options: output url pipe:1.
Applying option filter:v (apply specified filters to audio/video) with argument drawtext=text=hi:fontsize=24:fontcolor=white:x=(w-text_w)/2:y=(h-text_h)/2:box=1:boxcolor=black@0.5:boxborderw=5:fontfile=/Windows/fonts/calibri.ttf.
Successfully parsed a group of options.
Opening an output file: pipe:1.
[AVFormatContext @ 00000262cd0b2240] Unable to choose an output format for 'pipe:1'; use a standard extension for the filename or specify the format manually.
[out#0 @ 00000262cd0bb300] Error initializing the muxer for pipe:1: Invalid argument
Error opening output file pipe:1.
Error opening output files: Invalid argument
[AVIOContext @ 00000262cd0a9580] Statistics: 82242 bytes read, 0 seeks



I should be able to stream the processed video to my s3, but it keeps on throwing "Error : Error : Output format mp4 is not available"


-
What Is Incrementality & Why Is It Important in Marketing ?
26 mars 2024, par Erin