
Recherche avancée
Autres articles (89)
-
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par — Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond ;
-
Use, discuss, criticize
13 avril 2011, par — Talk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
A discussion list is available for all exchanges between users. -
MediaSPIP en mode privé (Intranet)
17 septembre 2013, parÀ partir de la version 0.3, un canal de MediaSPIP peut devenir privé, bloqué à toute personne non identifiée grâce au plugin "Intranet/extranet".
Le plugin Intranet/extranet, lorsqu’il est activé, permet de bloquer l’accès au canal à tout visiteur non identifié, l’empêchant d’accéder au contenu en le redirigeant systématiquement vers le formulaire d’identification.
Ce système peut être particulièrement utile pour certaines utilisations comme : Atelier de travail avec des enfants dont le contenu ne doit pas (...)
Sur d’autres sites (15143)
-
Anomalie #3239 : _CACHE_CONTEXTES_AJAX génère un dossier /tmp/cache/contextes qui grossit à l’infini
19 juillet 2014, par marcimat ☺☮☯♫Juste un mot tout de même :
En SPIP 3, il ne faut pas déclarer cette constante. La solution intermédiaire qu’on utilise est de passer automatiquement par tmp/cache/contexte SI certaines conditions sont requises, au cas par cas. Particulièrement :
- si bug lors du décryptage du hash ajax (un bug sur une version de php) => on met le contexte en fichier cache
- si la longueur du hash dépasse la longueur autorisée par suhosin (si présent) => on met le contexte en fichier cache. De la sorte, ça limite grandement le nombre de fichiers créés dans ce cache (et c’est fait de façon automatique).
Mais… ça ne résout pas du tout le problème : ces fichiers restent en cache et ne se nettoient jamais, ce qui potentiellement peut créer le même problème signalé ici.
-
FFmpeg C API set encoded frame’s rotation
26 mars 2015, par Justin BradleyUsing the FFmpeg C API I have encoding and decoding a video working. However, the re-encoded video stream does not maintain the original video’s orientation (rotation). So vertical videos have been flipped horizontal.
I’m not sure how to resolve this. Is there a metadata field that gets set ? Using MediaInfo I see the original video has a metadata field ’Rotation : 90°’ and the new video does not.
Or does each encoded frame need to be rotated vertically ?I’ve looked at the decode frame’s side_data but it is empty.
for (j = 0; j < decoded_frame->nb_side_data; j++) {
AVFrameSideData *sd = decoded_frame->side_data[j];
if(sd->type == AV_FRAME_DATA_DISPLAYMATRIX) {
LOGI("=> displaymatrix: rotation of %.2f degrees", av_display_rotation_get((int32_t *)sd->data));
}
} -
How can I use this JNI code to play video in GLSurfaceView? I cannot find the way to use it
2 mai 2016, par MrHuang#include
#include
#include
#include
#include <sys/time.h>
#include
#include "AeeVideo.h"
#include "videorender.h"
#include "decode.h"
#include
#include <android/log.h>
#define TAG "AeeVideo"
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, TAG, format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, TAG, format, ##__VA_ARGS__)
/* --- decoder / snapshot state shared by the JNI entry points below --- */
static int g_connectstate = -1;   /* last state pushed from Java via AeePlayerSetState */
static DecodeCtx *g_dec_ctx;      /* opened in AeePlayerStart, closed in AeePlayerStop */
static int last_width = 0;        /* geometry of the most recently decoded frame */
static int last_height = 0;
static int last_picsize = 0;      /* frame.size of the cached frame; 0 = nothing cached */
static uint8_t last_picture[3 * 1024 * 1024]; /* snapshot buffer re-rendered while disconnected (see AeePlayerRender) */
/* JNI: record the surface rectangle, then bring up the GL renderer and
 * the decoder.  Always returns 0. */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetScreen(JNIEnv * env, jobject obj, jint width, jint height)
{
    LOGI("and opengl set screen size (%d,%d,%d,%d)\n", 0, 0, width, height);
    gl_screen_set(0, 0, width, height);
    gl_initialize();
    Decode_Init();
    return 0;
}
/* JNI: open the decode stream for the given URL.
 * Returns 0 on success, -1 on failure. */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStart(JNIEnv * env, jobject obj, jstring Url)
{
    const char *pUrl = (*env)->GetStringUTFChars(env, Url, 0);
    if (!pUrl) {
        /* fix: GetStringUTFChars can return NULL on OOM (a Java
         * exception is then pending); previously dereferenced blindly. */
        return -1;
    }
    LOGI("stream url %s \n", pUrl);
    g_dec_ctx = Decode_OpenStream((char *)pUrl);
    if (!g_dec_ctx) {
        LOGE("openstream %s,failed!\n", pUrl);
        /* fix: the UTF chars were never released (JNI memory leak). */
        (*env)->ReleaseStringUTFChars(env, Url, pUrl);
        return -1;
    }
    (*env)->ReleaseStringUTFChars(env, Url, pUrl); /* fix: was leaked */
    return 0;
}
/* JNI: decode and render one frame.  While the decoder returns no frame
 * (disconnect), re-renders the cached last picture so the view is not
 * blank.  Returns 0 on success, the decoder's <= 0 result otherwise. */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerRender(JNIEnv * env, jobject obj)
{
    if (!g_dec_ctx)
        return -1;
    VideoFrame frame;
    int ret = Decode_ReadFrame(g_dec_ctx, &frame);
    if (ret <= 0) {
        if (last_picsize > 0) {
            LOGI("disconnect,render last pic\n");
            gl_render_frame(last_picture, last_width, last_height);
        }
        return ret;
    }
    LOGI("render video frame,pkt w,h:(%d,%d)\n", frame.width, frame.height);
    gl_render_frame(frame.data, frame.width, frame.height);
    /* fix: bound the snapshot copy — previously memcpy'd w*h bytes into a
     * fixed 3 MiB buffer with no check, overflowing on large frames. */
    size_t ysize = (size_t)frame.width * (size_t)frame.height;
    if (ysize > sizeof(last_picture)) {
        last_picsize = 0; /* frame too large to cache; render-only */
        return 0;
    }
    if (last_width != frame.width || last_height != frame.height) {
        /* New geometry: pre-fill with 0x80 so the un-copied chroma area of
         * the cached picture renders as neutral gray. */
        memset(last_picture, 0x80, sizeof(last_picture));
    }
    last_width = frame.width;
    last_height = frame.height;
    last_picsize = frame.size;
    memcpy(last_picture, frame.data, ysize); /* Y plane only: gray snapshot */
    return 0;
}
/* JNI: stop playback — close the stream, shut down the decoder and the
 * GL renderer.  Always returns 0. */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStop(JNIEnv * env, jobject obj)
{
    LOGI("AeePlayer Stop");
    if (g_dec_ctx) {
        Decode_CloseStream(g_dec_ctx);
        /* fix: clear the handle — it was left dangling, so a subsequent
         * AeePlayerRender call would use the freed context. */
        g_dec_ctx = NULL;
    }
    Decode_Quit();
    gl_uninitialize();
    return 0;
}
/* JNI: store the connection state reported by the Java layer.
 * Always returns 0. */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetState(JNIEnv * env, jobject obj, jint state)
{
    g_connectstate = state;
    LOGI("g_connectstate %d \n", state);
    return 0;
}
/* JNI: report the native library version as a Java string ("1.0.0"). */
JNIEXPORT jstring JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerGetVersion(JNIEnv * env, jobject obj)
{
    static const char version[] = "1.0.0";
    return (*env)->NewStringUTF(env, version);
}
#define _ANDROID_APP_
#include
#include
#include
#include
#include
#include
#include "videorender.h"
#ifdef _ANDROID_APP_
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <sys/time.h>
#include
#include <android/log.h>
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "VideoRender", format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, "VideoRender", format, ##__VA_ARGS__)
#endif
#ifdef _IOS_APP_
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#endif
/* --- renderer state shared by the gl_* functions below --- */
GLint ATTRIB_VERTEX, ATTRIB_TEXTURE;  /* attribute locations, refreshed each renderFrame() */
static GLuint g_texYId;               /* one GL texture per YUV plane */
static GLuint g_texUId;
static GLuint g_texVId;
static GLuint simpleProgram;          /* linked YUV->RGB shader program */
/* Screen rectangle set by gl_screen_set(). */
static int s_x = 0;
static int s_y = 0;
static int s_width = 0;
static int s_height = 0;
/* Letterboxed viewport computed by gl_viewsize_set(). */
static int view_x = 0;
static int view_y = 0;
static int view_width = 0;
static int view_height = 0;
/* Set by gl_imagesize_set(); NOTE(review): not read anywhere in this file. */
int g_width = 1280;
int g_height = 720;
/* Fragment shader: samples the three single-channel (luminance) plane
 * textures uploaded by bindTexture() and converts YUV to RGB in-shader
 * (coefficients match the common BT.601-style conversion matrix). */
static const char* FRAG_SHADER =
"varying lowp vec2 tc;\n"
"uniform sampler2D SamplerY;\n"
"uniform sampler2D SamplerU;\n"
"uniform sampler2D SamplerV;\n"
"void main(void)\n"
"{\n"
"mediump vec3 yuv;\n"
"lowp vec3 rgb;\n"
"yuv.x = texture2D(SamplerY, tc).r;\n"
"yuv.y = texture2D(SamplerU, tc).r - 0.5;\n"
"yuv.z = texture2D(SamplerV, tc).r - 0.5;\n"
"rgb = mat3( 1, 1, 1,\n"
"0, -0.39465, 2.03211,\n"
"1.13983, -0.58060, 0) * yuv;\n"
"gl_FragColor = vec4(rgb, 1);\n"
"}\n";
/* Vertex shader: pass-through position plus the texture coordinate
 * consumed by FRAG_SHADER via the `tc` varying. */
static const char* VERTEX_SHADER =
"attribute vec4 vPosition; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 tc; \n"
"void main() \n"
"{ \n"
" gl_Position = vPosition; \n"
" tc = a_texCoord; \n"
"} \n";
/* Upload one single-channel (luminance) plane into `texture`, with
 * linear filtering and edge clamping.  Returns the texture id. */
static GLuint bindTexture(GLuint texture, const char *buffer, GLuint w, GLuint h)
{
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buffer);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    return texture;
}
/* Draw the currently bound Y/U/V plane textures as a full-viewport quad
 * through simpleProgram; YUV->RGB conversion happens in the fragment
 * shader.  Assumes gl_initialize() succeeded and bindTexture() has
 * uploaded the planes for this frame. */
static void renderFrame()
{
/* Full-viewport quad in NDC, in GL_TRIANGLE_STRIP order (BL, BR, TL, TR). */
static GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
/* Alternative vertex set kept by the author for a rotated/skewed quad. */
//texture rotate
/*static GLfloat squareVertices[] = {
-1.0f, -0.5f,
0.5f, -1.0f,
-0.5f, 1.0f,
1.0f, 0.5f,
};*/
/* Texture coordinates; V is flipped (0 at bottom row of the image). */
static GLfloat coordVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
/* Alternative coordinates kept by the author to show the left half only. */
//texture half
/*static GLfloat coordVertices[] = {
0.0f, 1.0f,
0.5f, 1.0f,
0.0f, 0.0f,
0.5f, 0.0f,
};*/
glClearColor(0.0f, 0.0f, 0.0f, 1);
glClear(GL_COLOR_BUFFER_BIT);
/* Uniform/attribute locations are re-queried every frame; could be
 * cached after link, but kept as-is to preserve behavior. */
GLint tex_y = glGetUniformLocation(simpleProgram, "SamplerY");
GLint tex_u = glGetUniformLocation(simpleProgram, "SamplerU");
GLint tex_v = glGetUniformLocation(simpleProgram, "SamplerV");
//LOGI("tex_y:%d,tex_u:%d,tex_v:%d \n",tex_y,tex_u,tex_v);
ATTRIB_VERTEX = glGetAttribLocation(simpleProgram, "vPosition");
ATTRIB_TEXTURE = glGetAttribLocation(simpleProgram, "a_texCoord");
//LOGI("vertex %d,texture %d",ATTRIB_VERTEX,ATTRIB_TEXTURE);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
glEnableVertexAttribArray(ATTRIB_VERTEX);
//LOGI("enableVertexAttribArray vertex");
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//LOGI("enableVertexAttribArray texture");
/* Bind each plane to its own texture unit and point the samplers at it. */
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, g_texYId);
glUniform1i(tex_y, 0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, g_texUId);
glUniform1i(tex_u, 1);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, g_texVId);
glUniform1i(tex_v, 2);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
/* Buffer swap is left to the caller (GLSurfaceView swaps after onDrawFrame). */
//glutSwapBuffers();//double buffer
//glFlush();//single buffer
}
/* Compile `source` as a shader of the given type.
 * Returns the shader handle, or 0 on failure; the compile log (if any)
 * is written to the Android log and the failed object is deleted. */
static GLuint buildShader(const char* source, GLenum shaderType)
{
    GLuint shaderHandle = glCreateShader(shaderType);
    if (shaderHandle)
    {
        glShaderSource(shaderHandle, 1, &source, 0);
        glCompileShader(shaderHandle);
        GLint compiled = 0;
        glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shaderHandle, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char* buf = (char*) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shaderHandle, infoLen, NULL, buf);
                    LOGE("error::Could not compile shader %d:\n%s\n", shaderType, buf);
                    free(buf);
                }
            }
            /* fix: delete/zero on ANY compile failure — previously, when the
             * info log was empty, the failed handle leaked AND was returned
             * as if compilation had succeeded. */
            glDeleteShader(shaderHandle);
            shaderHandle = 0;
        }
    }
    return shaderHandle;
}
/* Compile both shader stages and link them into a GL program.
 * Returns the program handle, or 0 on any compile/link failure (the
 * link log, if any, is written to the Android log). */
static GLuint buildProgram(const char* vertexShaderSource,
                           const char* fragmentShaderSource)
{
    GLuint vertexShader = buildShader(vertexShaderSource, GL_VERTEX_SHADER);
    GLuint fragmentShader = buildShader(fragmentShaderSource, GL_FRAGMENT_SHADER);
    GLuint programHandle = 0;
    /* fix: only link when both stages compiled — previously a 0 handle
     * was attached after a compile failure, producing a confusing
     * GL_INVALID_VALUE / link error instead of a clean failure. */
    if (vertexShader && fragmentShader)
    {
        programHandle = glCreateProgram();
        if (programHandle)
        {
            glAttachShader(programHandle, vertexShader);
            glAttachShader(programHandle, fragmentShader);
            glLinkProgram(programHandle);
            GLint linkStatus = GL_FALSE;
            glGetProgramiv(programHandle, GL_LINK_STATUS, &linkStatus);
            if (linkStatus != GL_TRUE) {
                GLint bufLength = 0;
                glGetProgramiv(programHandle, GL_INFO_LOG_LENGTH, &bufLength);
                if (bufLength) {
                    char* buf = (char*) malloc(bufLength);
                    if (buf) {
                        glGetProgramInfoLog(programHandle, bufLength, NULL, buf);
                        LOGE("error::Could not link program:\n%s\n", buf);
                        free(buf);
                    }
                }
                glDeleteProgram(programHandle);
                programHandle = 0;
            }
        }
    }
    /* fix: the shader objects are not needed once linked (or on failure) —
     * previously two shader objects leaked on every call.
     * glDeleteShader(0) is a harmless no-op. */
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    return programHandle;
}
void gl_initialize()
{
LOGI("####gl_initialize###\n");
simpleProgram = buildProgram(VERTEX_SHADER, FRAG_SHADER);
if(!simpleProgram){
LOGE("opengl buildProgram() failed! \n");
return;
}
LOGI("glProgram %d\n",simpleProgram);
glUseProgram(simpleProgram);
glGenTextures(1, &g_texYId);
glGenTextures(1, &g_texUId);
glGenTextures(1, &g_texVId);
LOGI("opengl gentextures end");
}
/* Release the shader program and the three plane textures created by
 * gl_initialize(). */
void gl_uninitialize()
{
    LOGI("####gl_uninitialize####");
    glDeleteTextures(1, &g_texVId);
    glDeleteTextures(1, &g_texUId);
    glDeleteTextures(1, &g_texYId);
    glDeleteProgram(simpleProgram);
}
/* Upload and draw one I420 (planar YUV 4:2:0) frame: buf holds the Y
 * plane (w*h bytes) followed by the U and V planes (w/2 x h/2 each). */
void gl_render_frame(const char *buf, int w, int h)
{
    /* fix: was `w < 0 || h < 0`, which let a zero dimension through to a
     * division by zero in gl_viewsize_set(). */
    if (!buf || w <= 0 || h <= 0)
    {
        LOGE("this frame is invalid \n");
        return;
    }
    const char *y_buf = buf;
    const char *u_buf = y_buf + w * h;
    const char *v_buf = u_buf + w * h / 4;
    gl_viewsize_set(w, h); /* recompute the letterboxed viewport for this size */
    glViewport(view_x, view_y, view_width, view_height);
    //LOGI("glViewport x,y,width,height=[%d,%d,%d,%d]\n",view_x,view_y,view_width,view_height);
    bindTexture(g_texYId, y_buf, w, h);
    bindTexture(g_texUId, u_buf, w / 2, h / 2);
    bindTexture(g_texVId, v_buf, w / 2, h / 2);
    renderFrame();
}
/* Record the on-screen output rectangle (origin + size) that
 * gl_viewsize_set() fits decoded frames into. */
void gl_screen_set(int screen_x, int screen_y, int screen_width, int screen_height)
{
s_x = screen_x;
s_y = screen_y;
s_width = screen_width;
s_height = screen_height;
}
/* Compute the letterboxed viewport (view_x/y/width/height) that fits a
 * frame_width x frame_height frame into the screen rectangle set by
 * gl_screen_set(), preserving aspect ratio.  Ratios are compared as
 * integer percentages (height * 100 / width), so ratios within one
 * percentage point are treated as equal. */
void gl_viewsize_set(int frame_width, int frame_height)
{
    /* fix: guard the divisions below — frame_width == 0 or s_width == 0
     * previously caused a division by zero (undefined behavior). */
    if (frame_width <= 0 || s_width <= 0)
        return;
    int view_p = (int)((float)frame_height * 100 / frame_width);
    int screen_p = (int)((float)s_height * 100 / s_width);
    if (view_p == screen_p) {
        /* Same aspect: fill the whole screen rectangle. */
        view_x = s_x;
        view_y = s_y;
        view_width = s_width;
        view_height = s_height;
    }
    else if (view_p > screen_p) {
        /* Frame is taller than the screen: full height, reduced width,
         * centered horizontally.
         * NOTE(review): s_x is not added to view_x here — correct only
         * while s_x == 0 (as in AeePlayerSetScreen); confirm if callers
         * ever pass a non-zero origin. */
        view_width = (int)(s_height * 100 / view_p);
        view_height = s_height;
        view_x = (int)((s_width - view_width) / 2);
        view_y = s_y;
    }
    else {
        /* Frame is wider than the screen: full width, reduced height,
         * centered vertically (same s_y caveat as above). */
        view_width = s_width;
        view_height = (int)(s_width * view_p / 100);
        view_x = s_x;
        view_y = (int)((s_height - view_height) / 2);
    }
}
/* Record the source image size.
 * NOTE(review): g_width/g_height are never read in this file — confirm
 * whether any external caller relies on these globals. */
void gl_imagesize_set(int width, int height)
{
g_width = width;
g_height = height;
}
public class FirstOpenGLProjectJNI {
public static native int AeePlayerSetScreen(int width,int height);
public static native int AeePlayerStart(String url);
public static native int AeePlayerRender();
public static native int AeePlayerStop();
}
I want to use
GLSurfaceView
to play video, but the surface is always black. How can I show the video in the GLSurfaceView by using this JNI code?