
Recherche avancée
Médias (16)
-
#7 Ambience
16 octobre 2011, par
Mis à jour : Juin 2015
Langue : English
Type : Audio
-
#6 Teaser Music
16 octobre 2011, par
Mis à jour : Février 2013
Langue : English
Type : Audio
-
#5 End Title
16 octobre 2011, par
Mis à jour : Février 2013
Langue : English
Type : Audio
-
#3 The Safest Place
16 octobre 2011, par
Mis à jour : Février 2013
Langue : English
Type : Audio
-
#4 Emo Creates
15 octobre 2011, par
Mis à jour : Février 2013
Langue : English
Type : Audio
-
#2 Typewriter Dance
15 octobre 2011, par
Mis à jour : Février 2013
Langue : English
Type : Audio
Autres articles (35)
-
Other interesting software
13 avril 2011, par — We don’t claim to be the only ones doing what we do ... and especially not to assert claims to be the best either ... What we do, we just try to do it well and getting better ...
The following list represents softwares that tend to be more or less as MediaSPIP or that MediaSPIP tries more or less to do the same, whatever ...
We don’t know them, we didn’t try them, but you can take a peek.
Videopress
Website : http://videopress.com/
License : GNU/GPL v2
Source code : (...) -
Les formats acceptés
28 janvier 2010, parLes commandes suivantes permettent d’avoir des informations sur les formats et codecs gérés par l’installation local de ffmpeg :
ffmpeg -codecs ffmpeg -formats
Les format videos acceptés en entrée
Cette liste est non exhaustive, elle met en exergue les principaux formats utilisés : h264 : H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 m4v : raw MPEG-4 video format flv : Flash Video (FLV) / Sorenson Spark / Sorenson H.263 Theora wmv :
Les formats vidéos de sortie possibles
Dans un premier temps on (...) -
Ajouter notes et légendes aux images
7 février 2011, parPour pouvoir ajouter notes et légendes aux images, la première étape est d’installer le plugin "Légendes".
Une fois le plugin activé, vous pouvez le configurer dans l’espace de configuration afin de modifier les droits de création / modification et de suppression des notes. Par défaut seuls les administrateurs du site peuvent ajouter des notes aux images.
Modification lors de l’ajout d’un média
Lors de l’ajout d’un média de type "image" un nouveau bouton apparait au dessus de la prévisualisation (...)
Sur d’autres sites (6465)
-
YUV decoded by FFMPEG that draw with opengl is not clear
3 juin 2016, par S.Jin — I’m writing a movie player that uses FFMPEG and OpenGL ES. The movie can be decoded successfully, but when I use an AVFrame as a texture to draw on my screen, I found it was very fuzzy. I don’t know where my code is wrong. If I change the AVFrame from YUV to an RGB image, it is clear.
Does any one know why use YUV as texture to draw will be not clear ?My render code :
#import "SJGLView.h"
#import <GLKit/GLKit.h>
#import "SJDecoder.h"
#include "libavutil/pixfmt.h"
// MARK: - C Function
// Fetches and logs a shader's compile info log (no-op message when empty).
static void sj_logShaderError(GLuint shader) {
    GLint info_len = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
    if (info_len == 0) NSLog(@"Empty info");
    else {
        GLchar *log = (GLchar *)malloc(info_len);
        if (log != NULL) {  // fix: guard allocation failure
            glGetShaderInfoLog(shader, info_len, &info_len, log);
            NSLog(@"Shader compile log: %s", log);
            free(log);  // fix: buffer previously leaked on every call
        }
    }
}
// Fetches and logs a program's link info log, if any.
static void sj_logProgramError(GLuint program) {
    int info_length;
    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
    if (info_length) {
        GLchar *log = (GLchar *)malloc(info_length);
        if (log != NULL) {  // fix: guard allocation failure
            glGetProgramInfoLog(program, info_length, &info_length, log);
            NSLog(@"Program link log: %s", log);
            free(log);  // fix: buffer previously leaked on every call
        }
    }
}
// Compiles one shader stage from source. Returns the GL shader name on
// success, or 0 on failure after printing the compile log and destroying
// the shader object.
GLuint sj_loadShader(GLenum shader_type, const char* shader_source) {
    GLuint shader = glCreateShader(shader_type);
    glShaderSource(shader, 1, &shader_source, NULL);
    glCompileShader(shader);

    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (compiled) {
        return shader;
    }

    if (shader) {
        sj_logShaderError(shader);
        glDeleteShader(shader);
    }
    return 0;
}
// Fills `matrix` (16 floats, column-major) with an orthographic projection,
// using the same layout as the classic glOrtho matrix.
void loadOrtho(float *matrix, float left, float right, float bottom, float top, float near, float far) {
    float r_l = right - left;
    float t_b = top - bottom;
    float f_n = far - near;
    // Fix: the translation column of an orthographic projection is negated:
    // tx = -(r+l)/(r-l), ty = -(t+b)/(t-b), tz = -(f+n)/(f-n). The original
    // used the positive form, which is wrong for any asymmetric volume (the
    // symmetric -1..1 caller in render: hid the bug because tx=ty=tz=0).
    float tx = -(right + left) / r_l;
    float ty = -(top + bottom) / t_b;
    float tz = -(far + near) / f_n;
    matrix[0] = 2.0f / r_l;
    matrix[1] = 0.0f;
    matrix[2] = 0.0f;
    matrix[3] = 0.0f;
    matrix[4] = 0.0f;
    matrix[5] = 2.0f / t_b;
    matrix[6] = 0.0f;
    matrix[7] = 0.0f;
    matrix[8] = 0.0f;
    matrix[9] = 0.0f;
    matrix[10] = -2.0f / f_n;
    matrix[11] = 0.0f;
    matrix[12] = tx;
    matrix[13] = ty;
    matrix[14] = tz;
    matrix[15] = 1.0f;
}
// YCbCr -> RGB conversion coefficients for BT.709 (the HDTV standard),
// stored column-major for direct upload as a GLSL mat3 uniform.
static const GLfloat kColorMatrixBT709[] = {
    1.164, 1.164, 1.164,
    0.0, -0.213, 2.112,
    1.793, -0.533, 0.0,
};

// Returns the BT.709 YUV->RGB matrix used by the fragment shader.
const GLfloat *getColorMatrix_bt709() {
    return kColorMatrixBT709;
}
// Vertex-attribute slot indices. NOTE(review): currently unused — attribute
// locations are queried with glGetAttribLocation in -loadShaders instead.
enum {
ATTRIBUTE_VERTEX,
ATTRIBUTE_TEXCOORD,
};
// OpenGL ES 2 view that renders decoded video frames (YUV planar or RGB)
// through a shader program into a CAEAGLLayer-backed renderbuffer.
@implementation SJGLView {
EAGLContext *_context; // GLES2 rendering context
GLuint _framebuffer; // FBO with the layer-backed color renderbuffer attached
GLuint _renderbuffer;
GLint _backingWidth; // drawable size in pixels (from the renderbuffer)
GLint _backingHeight;
GLfloat _vertices[8]; // quad positions, 4 x vec2, triangle-strip order
GLuint _program; // linked shader program
GLuint _av4Position; // attribute/uniform locations queried after linking
GLuint _av2Texcoord;
GLuint _um4Mvp;
GLfloat _texcoords[8]; // quad texture coordinates, 4 x vec2
GLuint _us2Sampler[3]; // sampler uniforms for the Y/U/V planes
GLuint _um3ColorConversion; // mat3 YUV->RGB conversion uniform
GLuint _textures[3]; // one GL texture per plane
SJDecoder *_decoder; // source of frame dimensions and pixel format
}
// Back this view with a CAEAGLLayer so OpenGL ES can render into it.
+ (Class)layerClass {
return [CAEAGLLayer class];
}
// Designated initializer: stores the decoder (queried later for frame size
// and pixel format) and builds the GL context, buffers and shader program.
- (instancetype)initWithFrame:(CGRect)frame decoder:(SJDecoder *)decoder {
self = [super initWithFrame:frame];
if (self) {
_decoder = decoder;
[self setupGL];
}
return self;
}
// Reallocates the renderbuffer storage from the (possibly resized) layer,
// refreshes the cached pixel size, and redraws the last uploaded frame.
- (void)layoutSubviews {
    [super layoutSubviews];  // fix: the mandatory super call was missing
    glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
    [self updateVertices];
    [self render: nil];
}
// Recomputes the quad (aspect-fit letterboxes, other modes fill/crop) when
// the content mode changes, then redraws using the already-uploaded textures.
- (void)setContentMode:(UIViewContentMode)contentMode
{
[super setContentMode:contentMode];
[self updateVertices];
[self render:nil];
}
// Creates the GLES2 context, configures the backing CAEAGLLayer, then builds
// the framebuffer and shader program and caches sampler/uniform locations.
- (void)setupGL {
    _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    NSAssert(_context != nil, @"Failed to init EAGLContext");
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = @{
        kEAGLDrawablePropertyRetainedBacking: @YES,  // idiom: boxed literal over numberWithBool:
        kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
    };
    [EAGLContext setCurrentContext:_context];
    if ([self setupEAGLContext]) {
        NSLog(@"Success to setup EAGLContext");
        if ([self loadShaders]) {
            NSLog(@"Success to load shader");
            // One sampler per plane (Y/U/V) plus the YUV->RGB matrix uniform.
            _us2Sampler[0] = glGetUniformLocation(_program, "us2_SamplerX");
            _us2Sampler[1] = glGetUniformLocation(_program, "us2_SamplerY");
            _us2Sampler[2] = glGetUniformLocation(_program, "us2_SamplerZ");
            _um3ColorConversion = glGetUniformLocation(_program, "um3_ColorConversion");
        }
    }
}
// Creates the FBO and the color renderbuffer backed by the CAEAGLLayer,
// attaches them, and verifies completeness. Returns NO on any GL error.
- (BOOL)setupEAGLContext {
glGenFramebuffers(1, &_framebuffer);
glGenRenderbuffers(1, &_renderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
// Allocate renderbuffer storage from the layer, then cache its pixel size.
[_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(@"Failed to make complete framebuffer object: %x", status);
return NO;
}
GLenum glError = glGetError();
if (glError != GL_NO_ERROR) {
NSLog(@"Failed to setup EAGLContext: %x", glError);
return NO;
}
return YES;
}
// Compiles and links the vertex shader plus the fragment shader matching the
// decoder's pixel format (yuv420p vs. rgb), then caches attribute/uniform
// locations. Returns NO on any failure.
- (BOOL)loadShaders {
    NSString *vertexPath = [[NSBundle mainBundle] pathForResource:@"vertex" ofType:@"vsh"];
    const char *vertexString = [[NSString stringWithContentsOfFile:vertexPath encoding:NSUTF8StringEncoding error:nil] UTF8String];
    NSString *fragmentPath = _decoder.format == SJVideoFrameFormatYUV ? [[NSBundle mainBundle] pathForResource:@"yuv420p" ofType:@"fsh"] :
    [[NSBundle mainBundle] pathForResource:@"rgb" ofType:@"fsh"];
    const char *fragmentString = [[NSString stringWithContentsOfFile:fragmentPath encoding:NSUTF8StringEncoding error:nil] UTF8String];
    // Fix: guard against missing bundle resources — glShaderSource must not
    // be handed a NULL source string.
    if (vertexString == NULL || fragmentString == NULL) {
        return NO;
    }
    GLuint vertexShader = sj_loadShader(GL_VERTEX_SHADER, vertexString);
    GLuint fragmentShader = sj_loadShader(GL_FRAGMENT_SHADER, fragmentString);
    _program = glCreateProgram();
    glAttachShader(_program, vertexShader);
    glAttachShader(_program, fragmentShader);
    glLinkProgram(_program);
    GLint link_status = GL_FALSE;
    glGetProgramiv(_program, GL_LINK_STATUS, &link_status);
    if (!link_status) goto fail;
    // Fix: shader objects are not needed once linked into the program; they
    // previously leaked on the success path.
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    _av4Position = glGetAttribLocation(_program, "av4_Position");
    _av2Texcoord = glGetAttribLocation(_program, "av2_Texcoord");
    _um4Mvp = glGetUniformLocation(_program, "um4_ModelViewProjection");
    return YES;
fail:
    sj_logProgramError(_program);
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    glDeleteProgram(_program);
    return NO;
}
// Binds the program and lazily creates the three plane textures, configuring
// linear filtering + edge clamping, and wiring sampler i -> texture unit i.
- (void)useRenderer {
// Planes are tightly packed single-byte texels; disable 4-byte row alignment.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glUseProgram(_program);
if (0 == _textures[0]) glGenTextures(3, _textures);
for (int i = 0; i < 3; i++) {
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glUniform1i(_us2Sampler[i], i);
}
// NOTE(review): always uploads the BT.709 matrix; BT.601 (SD) sources would
// come out slightly off-color — confirm the source colorspace.
glUniformMatrix3fv(_um3ColorConversion, 1, GL_FALSE, getColorMatrix_bt709());
}
// Uploads a YUV 4:2:0 frame into the three plane textures (chroma planes are
// half-size in each dimension). Non-YUV frames — and nil — are ignored here.
- (void)uploadTexture:(SJVideoFrame *)frame {
if (frame.format == SJVideoFrameFormatYUV) {
SJVideoYUVFrame *yuvFrame = (SJVideoYUVFrame *)frame;
const GLubyte *pixel[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes };
// NOTE(review): these widths assume each plane is tightly packed, i.e.
// FFmpeg linesize == width. FFmpeg frequently pads linesize for alignment;
// uploading padded rows at `width` would shear/blur the picture — a prime
// suspect for the fuzziness described above. Verify AVFrame->linesize.
const GLsizei widths[3] = { yuvFrame.width, yuvFrame.width/2, yuvFrame.width/2 };
const GLsizei heights[3] = { yuvFrame.height, yuvFrame.height/2, yuvFrame.height/2 };
for (int i = 0; i < 3; i++) {
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widths[i], heights[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, pixel[i]);
}
}
}
// Draws `frame` into the layer's renderbuffer and presents it. A nil frame
// (as passed from layoutSubviews / setContentMode:) skips the texture upload
// and redraws whatever plane textures were uploaded last.
- (void)render:(SJVideoFrame *)frame {
[EAGLContext setCurrentContext:_context];
glUseProgram(_program);
[self useRenderer];
// Symmetric -1..1 ortho volume: vertices are already in NDC, so this is
// effectively an identity projection.
GLfloat modelviewProj[16];
loadOrtho(modelviewProj, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);
glUniformMatrix4fv(_um4Mvp, 1, GL_FALSE, modelviewProj);
[self updateVertices];
[self updateTexcoords];
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glViewport(0, 0, _backingWidth, _backingHeight);
[self uploadTexture:frame];
// Clear with the FBO bound, then draw the quad as a 4-vertex triangle strip.
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
// Scales the unit quad so the video keeps its aspect ratio relative to the
// drawable: "aspect fit" letterboxes (MIN scale), all other modes fill/crop
// (MAX scale). Uploads the result as the position attribute.
- (void)updateVertices {
[self resetVertices];
BOOL fit = (self.contentMode == UIViewContentModeScaleAspectFit);
// NOTE(review): assumes the decoder reports non-zero frame dimensions and a
// non-zero backing size; zeros here divide by zero — confirm upstream guards.
float width = _decoder.frameWidth;
float height = _decoder.frameHeight;
const float dW = (float)_backingWidth / width;
const float dH = (float)_backingHeight / height;
float dd = fit ? MIN(dH, dW) : MAX(dH, dW);
// Normalized half-extents of the scaled quad in NDC.
float nW = (width * dd / (float)_backingWidth);
float nH = (height * dd / (float)_backingHeight);
_vertices[0] = -nW;
_vertices[1] = -nH;
_vertices[2] = nW;
_vertices[3] = -nH;
_vertices[4] = -nW;
_vertices[5] = nH;
_vertices[6] = nW;
_vertices[7] = nH;
glVertexAttribPointer(_av4Position, 2, GL_FLOAT, GL_FALSE, 0, _vertices);
glEnableVertexAttribArray(_av4Position);
}
// Restores the quad to the full viewport in normalized device coordinates.
// Triangle-strip order: bottom-left, bottom-right, top-left, top-right.
- (void)resetVertices {
    const GLfloat fullQuad[8] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    for (int i = 0; i < 8; i++) {
        _vertices[i] = fullQuad[i];
    }
}
// Re-sends the default full-texture coordinates for the quad.
- (void)updateTexcoords {
[self resetTexcoords];
glVertexAttribPointer(_av2Texcoord, 2, GL_FLOAT, GL_FALSE, 0, _texcoords);
glEnableVertexAttribArray(_av2Texcoord);
}
// Default texture coordinates mapping the whole texture onto the quad,
// flipped vertically (the bottom-left vertex samples the top texture row).
- (void)resetTexcoords {
    const GLfloat fullTex[8] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    for (int i = 0; i < 8; i++) {
        _texcoords[i] = fullTex[i];
    }
}.fsh :
precision highp float;
varying highp vec2 vv2_Texcoord;
uniform mat3 um3_ColorConversion;
uniform lowp sampler2D us2_SamplerX;
uniform lowp sampler2D us2_SamplerY;
uniform lowp sampler2D us2_SamplerZ;
void main() {
    // Sample the Y/U/V planes and remove the video-range offsets
    // (16/255 luma offset, 0.5 chroma bias) before matrix conversion.
    mediump vec3 yuv;
    lowp vec3 rgb;
    yuv.x = (texture2D(us2_SamplerX, vv2_Texcoord).r - (16.0/255.0));
    yuv.y = (texture2D(us2_SamplerY, vv2_Texcoord).r - 0.5);
    yuv.z = (texture2D(us2_SamplerZ, vv2_Texcoord).r - 0.5);
    rgb = um3_ColorConversion * yuv;  // fix: 'yuva' was undeclared; must be 'yuv'
    gl_FragColor = vec4(rgb, 1.0);
}.vsh file :
// NOTE(review): although introduced as the ".vsh file", this is fragment-
// shader code — it writes gl_FragColor and samples a texture, and the
// assignment is missing its trailing semicolon. A real vertex shader must
// output gl_Position and the vv2_Texcoord varying; verify the bundled file.
precision highp float;
varying highp vec2 vv2_Texcoord;
uniform lowp sampler2D us2_SamplerX;
void main() {
gl_FragColor = vec4(texture2D(us2_SamplerX, vv2_Texcoord).rgb, 1)
}Result image :
resultThe rgb image :
RGB image -
Revision dd5510750a : third_party/libwebm : pull from upstream. Upstream hash : a58c32339e06e5d672a58cd
10 septembre 2015, par Vignesh VenkatasubramanianChanged Paths :
Modify /examples.mk
Modify /third_party/libwebm/README.libvpx
Modify /third_party/libwebm/mkvmuxer.hpp
Modify /third_party/libwebm/mkvparser.cpp
Modify /third_party/libwebm/mkvparser.hpp
Modify /third_party/libwebm/webmids.hpp
third_party/libwebm : pull from upstream.Upstream hash : a58c32339e06e5d672a58cdd5844cea0a661e735
Changes from upstream since last update :
a58c323 mkvmuxer : Add codec id constant for VP10.
714f3c4 mkvparser : validate results in EBMLHeader::Parse.
cec98d4 mkvparser : Correct the ReadID implementation.
eb36ae4 Merge changes I029a268e,Ia272b150,I5c4d1bbc,Ia47a2478,I3a2e2226
229f493 Merge "mkvparser : Segment::AppendCluster asserts to error checks."
287faf9 Merge "mkvparser : Segment::DoLoadClusterUnknownSize asserts to error
checks."
1a87b59 Merge "mkvparser : Segment assert clean up."
d26ec69 mkvparser : Cluster::Parse clean up.
f2029be mkvparser : Disallow EBML IDs equal to 0.
19f5694 mkvparser : Cluster::Load clean up.
27a07c1 mkvparser : Segment::Load asserts to error checks.
d0313dd mkvparser : Segment::PreloadCluster asserts to error checks.
b108695 mkvparser : Segment::AppendCluster asserts to error checks.
4630f80 mkvparser : Segment::DoLoadClusterUnknownSize asserts to error checks.
841a9b5 mkvparser : Segment assert clean up.
8c4ca2e Merge "mkvparser : Make mkvparser namespace usage uniform."
49ae6f0 Merge "mkvparser : Fix include order."
0735bb5 mkvparser : Make mkvparser namespace usage uniform.
93b24c4 mkvparser : Fix include order.
a57d660 sample_muxer : fix Segment memory leak on error
1c5bd94 mkvparser : Cues, change asserts to error checks.
7f77201 Merge "mkvparser : Add ReadID."
795fd56 mkvparser : set kMaxAllocSize explicitly
23bb18b mkvparser : Add ReadID.
7b57e37 mkvparser : add SafeArrayAlloc.
83a1f68 mkvparser : Remove buf_t typedef.
5074714 Merge changes Ia1265a63,I799d54df,Icfc582e4,I3425f608
b181105 Merge changes Ie4318152,I1e65f30f
06b4337 Block::Parse : replace pos asserts w/checks
b366a98 Cluster::ParseBlockGroup : replace pos asserts w/checks
2857b23 Tags: :*::Parse : replace pos asserts w/checks
f1b2cfa Chapters: :*::Parse : replace pos asserts w/checks
ca80629 Merge "mkvparser : Cues::PreloadCuePoint now returns bool."
6b4b297 Block::Parse : use int64 to aggregate laced frame sizes
c0d2c98 UnserializeFloat : check result for Inf/NaN
1a6dc4f mkvparser : Cues::PreloadCuePoint now returns bool.
275ac22 mkvparser : Cluster::Create clean up.
064f2ee Segment::PreloadCluster() : return a bool status
3778408 Segment::AppendCluster() : return a bool status
e86d046 mkvparser : check Cluster::Create() return
f9885b5 mkvparser : check allocations
21ee398 mkvparser : Segment::Load fail w/missing info/tracks
08fb654 Merge changes I264e68b2,Ife6190a4,Ibf37245f,I06efadb5,I88b5dfec, ...
c896095 mkvparser/Cluster : convert asserts to failure returns
680b4bf mkvparser/Tracks : convert asserts to failure returns
5889e6c mkvparser/Track : convert asserts to failure returns
5135c4c mkvparser/ContentEncoding : convert asserts to failure returns
b0e4f32 mkvparser/Cues : convert asserts to failure returns
13ccc7f mkvparser/UnserializeInt : fix sign flip
db3f9bb mkvparser/SeekHead : convert asserts to failure returns
8de3654 mkvparser/Segment : convert asserts to failure returns
fa2aa7d SeekHead::Parse() : fix assertion failure
d9bdade sample,_muxer : check SegmentInfo::GetInfo() return
07a9cf7 Merge "mkvparser : Remove some asserts."
c56ee29 mkvparser : Remove some asserts.
d901324 Merge "mkvparser : Remove some asserts from SegmentInfo::Parse."
7f7d898 Fix case sensitivity issue in iosbuild.sh.
42fe2cd mkvparser : Remove some asserts from SegmentInfo::Parse.
8bccd9c Merge "mkvparser : avoid rollover in UnserializeInt()."
7a2fa0d mkvparser : avoid rollover in UnserializeInt().
44f5ce6 mkvparser : Disallow durations in seconds greater than LONG_LONG_MAX.
b521e30 Merge "mkvparser : Segment::ParseHeaders() avoid rollover and bad int
sizes."
7680e2a mkvparser : Check for errors in Match().
39a315f mkvparser : Segment::ParseHeaders() avoid rollover and bad int sizes.
f250ace mkvparser : Handle invalid lengths and rollover in ParseElementHeader().
cd96a76 mkvparser : Avoid rollover/truncation in UnserializeString().
8e8b3db Merge "mkvparser : Add error checking in Block::Parse."
82b7e5f sample : correct mbstowcs() error check
04d7809 sample : check allocation return
986b64b mkvparser : Add error checking in Block::Parse.Change-Id : I39beef84962d6341f8ce53be06807b3e2068f777
-
How to change clarity of an h264 video which decoded by FFMPEG and rendered by opengl
5 juin 2016, par S.Jin — I’m writing a movie player that uses FFMPEG and OpenGL ES. The movie can be decoded successfully, but when I use an AVFrame as a texture to draw on my screen, I found it was very fuzzy. I don’t know where my code is wrong. If I change the AVFrame from YUV to an RGB image, it is clear.
Does any one know why use YUV as texture to draw will be not clear ?My render code :
#import "SJGLView.h"
#import <GLKit/GLKit.h>
#import "SJDecoder.h"
#include "libavutil/pixfmt.h"
// MARK: - C Function
// Fetches and logs a shader's compile info log (no-op message when empty).
static void sj_logShaderError(GLuint shader) {
    GLint info_len = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
    if (info_len == 0) NSLog(@"Empty info");
    else {
        GLchar *log = (GLchar *)malloc(info_len);
        if (log != NULL) {  // fix: guard allocation failure
            glGetShaderInfoLog(shader, info_len, &info_len, log);
            NSLog(@"Shader compile log: %s", log);
            free(log);  // fix: buffer previously leaked on every call
        }
    }
}
// Fetches and logs a program's link info log, if any.
static void sj_logProgramError(GLuint program) {
    int info_length;
    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
    if (info_length) {
        GLchar *log = (GLchar *)malloc(info_length);
        if (log != NULL) {  // fix: guard allocation failure
            glGetProgramInfoLog(program, info_length, &info_length, log);
            NSLog(@"Program link log: %s", log);
            free(log);  // fix: buffer previously leaked on every call
        }
    }
}
// Compiles one shader stage from source. Returns the GL shader name on
// success, or 0 on failure after printing the compile log and destroying
// the shader object.
GLuint sj_loadShader(GLenum shader_type, const char* shader_source) {
    GLuint shader = glCreateShader(shader_type);
    glShaderSource(shader, 1, &shader_source, NULL);
    glCompileShader(shader);

    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (compiled) {
        return shader;
    }

    if (shader) {
        sj_logShaderError(shader);
        glDeleteShader(shader);
    }
    return 0;
}
// Fills `matrix` (16 floats, column-major) with an orthographic projection,
// using the same layout as the classic glOrtho matrix.
void loadOrtho(float *matrix, float left, float right, float bottom, float top, float near, float far) {
    float r_l = right - left;
    float t_b = top - bottom;
    float f_n = far - near;
    // Fix: the translation column of an orthographic projection is negated:
    // tx = -(r+l)/(r-l), ty = -(t+b)/(t-b), tz = -(f+n)/(f-n). The original
    // used the positive form, which is wrong for any asymmetric volume (the
    // symmetric -1..1 caller in render: hid the bug because tx=ty=tz=0).
    float tx = -(right + left) / r_l;
    float ty = -(top + bottom) / t_b;
    float tz = -(far + near) / f_n;
    matrix[0] = 2.0f / r_l;
    matrix[1] = 0.0f;
    matrix[2] = 0.0f;
    matrix[3] = 0.0f;
    matrix[4] = 0.0f;
    matrix[5] = 2.0f / t_b;
    matrix[6] = 0.0f;
    matrix[7] = 0.0f;
    matrix[8] = 0.0f;
    matrix[9] = 0.0f;
    matrix[10] = -2.0f / f_n;
    matrix[11] = 0.0f;
    matrix[12] = tx;
    matrix[13] = ty;
    matrix[14] = tz;
    matrix[15] = 1.0f;
}
// YCbCr -> RGB conversion coefficients for BT.709 (the HDTV standard),
// stored column-major for direct upload as a GLSL mat3 uniform.
static const GLfloat kColorMatrixBT709[] = {
    1.164, 1.164, 1.164,
    0.0, -0.213, 2.112,
    1.793, -0.533, 0.0,
};

// Returns the BT.709 YUV->RGB matrix used by the fragment shader.
const GLfloat *getColorMatrix_bt709() {
    return kColorMatrixBT709;
}
// Vertex-attribute slot indices. NOTE(review): currently unused — attribute
// locations are queried with glGetAttribLocation in -loadShaders instead.
enum {
ATTRIBUTE_VERTEX,
ATTRIBUTE_TEXCOORD,
};
// OpenGL ES 2 view that renders decoded video frames (YUV planar or RGB)
// through a shader program into a CAEAGLLayer-backed renderbuffer.
@implementation SJGLView {
EAGLContext *_context; // GLES2 rendering context
GLuint _framebuffer; // FBO with the layer-backed color renderbuffer attached
GLuint _renderbuffer;
GLint _backingWidth; // drawable size in pixels (from the renderbuffer)
GLint _backingHeight;
GLfloat _vertices[8]; // quad positions, 4 x vec2, triangle-strip order
GLuint _program; // linked shader program
GLuint _av4Position; // attribute/uniform locations queried after linking
GLuint _av2Texcoord;
GLuint _um4Mvp;
GLfloat _texcoords[8]; // quad texture coordinates, 4 x vec2
GLuint _us2Sampler[3]; // sampler uniforms for the Y/U/V planes
GLuint _um3ColorConversion; // mat3 YUV->RGB conversion uniform
GLuint _textures[3]; // one GL texture per plane
SJDecoder *_decoder; // source of frame dimensions and pixel format
}
// Back this view with a CAEAGLLayer so OpenGL ES can render into it.
+ (Class)layerClass {
return [CAEAGLLayer class];
}
// Designated initializer: stores the decoder (queried later for frame size
// and pixel format) and builds the GL context, buffers and shader program.
- (instancetype)initWithFrame:(CGRect)frame decoder:(SJDecoder *)decoder {
self = [super initWithFrame:frame];
if (self) {
_decoder = decoder;
[self setupGL];
}
return self;
}
// Reallocates the renderbuffer storage from the (possibly resized) layer,
// refreshes the cached pixel size, and redraws the last uploaded frame.
- (void)layoutSubviews {
    [super layoutSubviews];  // fix: the mandatory super call was missing
    glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
    [self updateVertices];
    [self render: nil];
}
// Recomputes the quad (aspect-fit letterboxes, other modes fill/crop) when
// the content mode changes, then redraws using the already-uploaded textures.
- (void)setContentMode:(UIViewContentMode)contentMode
{
[super setContentMode:contentMode];
[self updateVertices];
[self render:nil];
}
// Creates the GLES2 context, configures the backing CAEAGLLayer, then builds
// the framebuffer and shader program and caches sampler/uniform locations.
- (void)setupGL {
    _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    NSAssert(_context != nil, @"Failed to init EAGLContext");
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = @{
        kEAGLDrawablePropertyRetainedBacking: @YES,  // idiom: boxed literal over numberWithBool:
        kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
    };
    [EAGLContext setCurrentContext:_context];
    if ([self setupEAGLContext]) {
        NSLog(@"Success to setup EAGLContext");
        if ([self loadShaders]) {
            NSLog(@"Success to load shader");
            // One sampler per plane (Y/U/V) plus the YUV->RGB matrix uniform.
            _us2Sampler[0] = glGetUniformLocation(_program, "us2_SamplerX");
            _us2Sampler[1] = glGetUniformLocation(_program, "us2_SamplerY");
            _us2Sampler[2] = glGetUniformLocation(_program, "us2_SamplerZ");
            _um3ColorConversion = glGetUniformLocation(_program, "um3_ColorConversion");
        }
    }
}
// Creates the FBO and the color renderbuffer backed by the CAEAGLLayer,
// attaches them, and verifies completeness. Returns NO on any GL error.
- (BOOL)setupEAGLContext {
glGenFramebuffers(1, &_framebuffer);
glGenRenderbuffers(1, &_renderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
// Allocate renderbuffer storage from the layer, then cache its pixel size.
[_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(@"Failed to make complete framebuffer object: %x", status);
return NO;
}
GLenum glError = glGetError();
if (glError != GL_NO_ERROR) {
NSLog(@"Failed to setup EAGLContext: %x", glError);
return NO;
}
return YES;
}
// Compiles and links the vertex shader plus the fragment shader matching the
// decoder's pixel format (yuv420p vs. rgb), then caches attribute/uniform
// locations. Returns NO on any failure.
- (BOOL)loadShaders {
    NSString *vertexPath = [[NSBundle mainBundle] pathForResource:@"vertex" ofType:@"vsh"];
    const char *vertexString = [[NSString stringWithContentsOfFile:vertexPath encoding:NSUTF8StringEncoding error:nil] UTF8String];
    NSString *fragmentPath = _decoder.format == SJVideoFrameFormatYUV ? [[NSBundle mainBundle] pathForResource:@"yuv420p" ofType:@"fsh"] :
    [[NSBundle mainBundle] pathForResource:@"rgb" ofType:@"fsh"];
    const char *fragmentString = [[NSString stringWithContentsOfFile:fragmentPath encoding:NSUTF8StringEncoding error:nil] UTF8String];
    // Fix: guard against missing bundle resources — glShaderSource must not
    // be handed a NULL source string.
    if (vertexString == NULL || fragmentString == NULL) {
        return NO;
    }
    GLuint vertexShader = sj_loadShader(GL_VERTEX_SHADER, vertexString);
    GLuint fragmentShader = sj_loadShader(GL_FRAGMENT_SHADER, fragmentString);
    _program = glCreateProgram();
    glAttachShader(_program, vertexShader);
    glAttachShader(_program, fragmentShader);
    glLinkProgram(_program);
    GLint link_status = GL_FALSE;
    glGetProgramiv(_program, GL_LINK_STATUS, &link_status);
    if (!link_status) goto fail;
    // Fix: shader objects are not needed once linked into the program; they
    // previously leaked on the success path.
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    _av4Position = glGetAttribLocation(_program, "av4_Position");
    _av2Texcoord = glGetAttribLocation(_program, "av2_Texcoord");
    _um4Mvp = glGetUniformLocation(_program, "um4_ModelViewProjection");
    return YES;
fail:
    sj_logProgramError(_program);
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    glDeleteProgram(_program);
    return NO;
}
// Binds the program and lazily creates the three plane textures, configuring
// linear filtering + edge clamping, and wiring sampler i -> texture unit i.
- (void)useRenderer {
// Planes are tightly packed single-byte texels; disable 4-byte row alignment.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glUseProgram(_program);
if (0 == _textures[0]) glGenTextures(3, _textures);
for (int i = 0; i < 3; i++) {
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glUniform1i(_us2Sampler[i], i);
}
// NOTE(review): always uploads the BT.709 matrix; BT.601 (SD) sources would
// come out slightly off-color — confirm the source colorspace.
glUniformMatrix3fv(_um3ColorConversion, 1, GL_FALSE, getColorMatrix_bt709());
}
// Uploads a YUV 4:2:0 frame into the three plane textures (chroma planes are
// half-size in each dimension). Non-YUV frames — and nil — are ignored here.
- (void)uploadTexture:(SJVideoFrame *)frame {
if (frame.format == SJVideoFrameFormatYUV) {
SJVideoYUVFrame *yuvFrame = (SJVideoYUVFrame *)frame;
const GLubyte *pixel[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes };
// NOTE(review): these widths assume each plane is tightly packed, i.e.
// FFmpeg linesize == width. FFmpeg frequently pads linesize for alignment;
// uploading padded rows at `width` would shear/blur the picture — a prime
// suspect for the fuzziness described above. Verify AVFrame->linesize.
const GLsizei widths[3] = { yuvFrame.width, yuvFrame.width/2, yuvFrame.width/2 };
const GLsizei heights[3] = { yuvFrame.height, yuvFrame.height/2, yuvFrame.height/2 };
for (int i = 0; i < 3; i++) {
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widths[i], heights[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, pixel[i]);
}
}
}
// Draws `frame` into the layer's renderbuffer and presents it. A nil frame
// (as passed from layoutSubviews / setContentMode:) skips the texture upload
// and redraws whatever plane textures were uploaded last.
- (void)render:(SJVideoFrame *)frame {
[EAGLContext setCurrentContext:_context];
glUseProgram(_program);
[self useRenderer];
// Symmetric -1..1 ortho volume: vertices are already in NDC, so this is
// effectively an identity projection.
GLfloat modelviewProj[16];
loadOrtho(modelviewProj, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);
glUniformMatrix4fv(_um4Mvp, 1, GL_FALSE, modelviewProj);
[self updateVertices];
[self updateTexcoords];
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glViewport(0, 0, _backingWidth, _backingHeight);
[self uploadTexture:frame];
// Clear with the FBO bound, then draw the quad as a 4-vertex triangle strip.
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
// Scales the unit quad so the video keeps its aspect ratio relative to the
// drawable: "aspect fit" letterboxes (MIN scale), all other modes fill/crop
// (MAX scale). Uploads the result as the position attribute.
- (void)updateVertices {
[self resetVertices];
BOOL fit = (self.contentMode == UIViewContentModeScaleAspectFit);
// NOTE(review): assumes the decoder reports non-zero frame dimensions and a
// non-zero backing size; zeros here divide by zero — confirm upstream guards.
float width = _decoder.frameWidth;
float height = _decoder.frameHeight;
const float dW = (float)_backingWidth / width;
const float dH = (float)_backingHeight / height;
float dd = fit ? MIN(dH, dW) : MAX(dH, dW);
// Normalized half-extents of the scaled quad in NDC.
float nW = (width * dd / (float)_backingWidth);
float nH = (height * dd / (float)_backingHeight);
_vertices[0] = -nW;
_vertices[1] = -nH;
_vertices[2] = nW;
_vertices[3] = -nH;
_vertices[4] = -nW;
_vertices[5] = nH;
_vertices[6] = nW;
_vertices[7] = nH;
glVertexAttribPointer(_av4Position, 2, GL_FLOAT, GL_FALSE, 0, _vertices);
glEnableVertexAttribArray(_av4Position);
}
// Restores the quad to the full viewport in normalized device coordinates.
// Triangle-strip order: bottom-left, bottom-right, top-left, top-right.
- (void)resetVertices {
    const GLfloat fullQuad[8] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    for (int i = 0; i < 8; i++) {
        _vertices[i] = fullQuad[i];
    }
}
// Re-sends the default full-texture coordinates for the quad.
- (void)updateTexcoords {
[self resetTexcoords];
glVertexAttribPointer(_av2Texcoord, 2, GL_FLOAT, GL_FALSE, 0, _texcoords);
glEnableVertexAttribArray(_av2Texcoord);
}
// Default texture coordinates mapping the whole texture onto the quad,
// flipped vertically (the bottom-left vertex samples the top texture row).
- (void)resetTexcoords {
    const GLfloat fullTex[8] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    for (int i = 0; i < 8; i++) {
        _texcoords[i] = fullTex[i];
    }
}.fsh :
precision highp float;
varying highp vec2 vv2_Texcoord;
uniform mat3 um3_ColorConversion;
uniform lowp sampler2D us2_SamplerX;
uniform lowp sampler2D us2_SamplerY;
uniform lowp sampler2D us2_SamplerZ;
void main() {
    // Sample the Y/U/V planes and remove the video-range offsets
    // (16/255 luma offset, 0.5 chroma bias) before matrix conversion.
    mediump vec3 yuv;
    lowp vec3 rgb;
    yuv.x = (texture2D(us2_SamplerX, vv2_Texcoord).r - (16.0/255.0));
    yuv.y = (texture2D(us2_SamplerY, vv2_Texcoord).r - 0.5);
    yuv.z = (texture2D(us2_SamplerZ, vv2_Texcoord).r - 0.5);
    rgb = um3_ColorConversion * yuv;  // fix: 'yuva' was undeclared; must be 'yuv'
    gl_FragColor = vec4(rgb, 1.0);
}.vsh file :
// NOTE(review): although introduced as the ".vsh file", this is fragment-
// shader code — it writes gl_FragColor and samples a texture, and the
// assignment is missing its trailing semicolon. A real vertex shader must
// output gl_Position and the vv2_Texcoord varying; verify the bundled file.
precision highp float;
varying highp vec2 vv2_Texcoord;
uniform lowp sampler2D us2_SamplerX;
void main() {
gl_FragColor = vec4(texture2D(us2_SamplerX, vv2_Texcoord).rgb, 1)
}The rgb image :
RGB image
Update to add a GL_NEAREST image :