
Recherche avancée
Médias (1)
-
Sintel MP4 Surround 5.1 Full
13 mai 2011, par
Mis à jour : Février 2012
Langue : English
Type : Video
Autres articles (34)
-
Other interesting software
13 avril 2011, par
We don’t claim to be the only ones doing what we do ... and especially not to assert claims to be the best either ... What we do, we just try to do it well and getting better ...
The following list represents softwares that tend to be more or less as MediaSPIP or that MediaSPIP tries more or less to do the same, whatever ...
We don’t know them, we didn’t try them, but you can take a peek.
Videopress
Website : http://videopress.com/
License : GNU/GPL v2
Source code : (...) -
Publier sur MédiaSpip
13 juin 2013
Puis-je poster des contenus à partir d’une tablette Ipad ?
Oui, si votre Médiaspip installé est à la version 0.2 ou supérieure. Contacter au besoin l’administrateur de votre MédiaSpip pour le savoir -
Keeping control of your media in your hands
13 avril 2011, par
The vocabulary used on this site and around MediaSPIP in general, aims to avoid reference to Web 2.0 and the companies that profit from media-sharing.
While using MediaSPIP, you are invited to avoid using words like "Brand", "Cloud" and "Market".
MediaSPIP is designed to facilitate the sharing of creative media online, while allowing authors to retain complete control of their work.
MediaSPIP aims to be accessible to as many people as possible and development is based on expanding the (...)
Sur d’autres sites (5814)
-
OpenCV could not find H264 Encoder
7 juin 2016, par Ahmed Nassar
I’m trying to write a video to file using OpenCV on ROS to MP4 container using H264 encoding for web, but I keep getting :
Could not find encoder for codec id 28: Encoder not found
This is my code :
frame_writer.open(pathfile, CV_FOURCC('X','2','6','4'),framerate, cv::Size(vpre[0].cols, vpre[0].rows),true);
I tried CV_FOURCC('X','2','6','4') and CV_FOURCC('H','2','6','4'), and it didn’t work.
I reinstalled my ffmpeg from source, and when I run ffmpeg -codecs I get :
DEV.LS h264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (decoders: h264 h264_vdpau ) (encoders: libx264 libx264rgb )
I don’t understand why I’m still getting the error.
-
Révision 23023 : complément à r20876 : pas de data dans le champ url_site
6 juin 2016, par brunobergot@gmail.com -
How can I use this JNI code to play video in GLSurfaceView? I cannot find the way to use it
2 mai 2016, par MrHuang
#include
#include
#include
#include
#include <sys/time.h>
#include
#include "AeeVideo.h"
#include "videorender.h"
#include "decode.h"
#include
#include <android/log.h>
#define TAG "AeeVideo"
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, TAG, format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, TAG, format, ##__VA_ARGS__)
/* Connection state last reported from Java via AeePlayerSetState (-1 = unknown). */
static int g_connectstate = -1;
/* Decoder context; non-NULL only between a successful AeePlayerStart and AeePlayerStop. */
static DecodeCtx *g_dec_ctx;
/* Geometry and byte size of the most recently decoded frame, kept so the
 * last picture can be re-rendered when the stream disconnects. */
static int last_width = 0;
static int last_height = 0;
static int last_picsize = 0;
static uint8_t last_picture[3 * 1024 * 1024];//save color pic
/*
 * JNI entry point: record the on-screen viewport, then bring up the GL
 * renderer and the decoder subsystem. Always returns 0.
 */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetScreen(JNIEnv * env, jobject obj,jint width,jint height)
{
    LOGI("and opengl set screen size (%d,%d,%d,%d)\n", 0, 0, width, height);
    gl_screen_set(0, 0, width, height);
    gl_initialize();
    Decode_Init();
    return 0;
}
/*
 * JNI entry point: open the stream named by Url.
 * Returns 0 on success, -1 on failure (decoder could not open the URL, or
 * the URL string could not be fetched from the JVM).
 */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStart(JNIEnv * env, jobject obj, jstring Url)
{
    const char *pUrl = (*env)->GetStringUTFChars(env, Url, 0);
    if (!pUrl) {
        /* OOM in the JVM; an exception is already pending on the Java side. */
        return -1;
    }
    LOGI("stream url %s \n", pUrl);
    g_dec_ctx = Decode_OpenStream((char *)pUrl);
    if (!g_dec_ctx) {
        LOGE("openstream %s,failed!\n", pUrl);
    }
    /* BUG FIX: the original never released the UTF chars, leaking the
     * JVM-side copy on every call. */
    (*env)->ReleaseStringUTFChars(env, Url, pUrl);
    return g_dec_ctx ? 0 : -1;
}
/*
 * JNI entry point: pull one decoded frame and draw it.
 * Returns the Decode_ReadFrame result (<= 0 means no new frame; in that case
 * the last cached picture, if any, is re-rendered), or 0 after a successful
 * draw. Returns -1 when no stream is open.
 */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerRender(JNIEnv * env, jobject obj)
{
    if (!g_dec_ctx)
        return -1;

    VideoFrame frame;
    int ret = Decode_ReadFrame(g_dec_ctx, &frame);
    if (ret <= 0) {
        /* Stream stalled or disconnected: keep showing the last picture. */
        if (last_picsize > 0) {
            LOGI("disconnect,render last pic\n");
            gl_render_frame(last_picture, last_width, last_height);
        }
        return ret;
    }

    LOGI("render video frame,pkt w,h:(%d,%d)\n", frame.width, frame.height);
    gl_render_frame(frame.data, frame.width, frame.height);

    /* Cache a grayscale snapshot of the frame: copy only the luma plane and
     * keep the chroma planes at 0x80 (neutral gray in YUV). */
    size_t luma_size = (size_t)frame.width * (size_t)frame.height;
    /* BUG FIX: the original memcpy'd w*h bytes with no bound check; a frame
     * larger than the 3 MiB cache would overflow last_picture. The cached
     * picture is later rendered as full YUV (1.5 * w * h bytes), so bound
     * against that. */
    if (luma_size > 0 && luma_size + luma_size / 2 <= sizeof(last_picture)) {
        if (last_width != frame.width || last_height != frame.height) {
            memset(last_picture, 0x80, sizeof(last_picture)); /* gray background */
        }
        memcpy(last_picture, frame.data, luma_size); /* copy a gray pic */
        last_width = frame.width;
        last_height = frame.height;
        last_picsize = frame.size;
    }
    return 0;
}
/*
 * JNI entry point: stop playback and release decoder and GL resources.
 * Safe to call when no stream is open. Always returns 0.
 */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStop(JNIEnv * env, jobject obj)
{
    LOGI("AeePlayer Stop");
    if (g_dec_ctx) {
        Decode_CloseStream(g_dec_ctx);
        /* BUG FIX: clear the handle so a late AeePlayerRender() call cannot
         * dereference the freed decoder context. */
        g_dec_ctx = NULL;
    }
    Decode_Quit();
    gl_uninitialize();
    return 0;
}
/*
 * JNI entry point: record the connection state reported by the Java layer.
 * The value is only stored and logged here. Always returns 0.
 */
JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetState(JNIEnv * env, jobject obj, jint state)
{
    g_connectstate = state;
    LOGI("g_connectstate %d \n", state);
    return 0;
}
/*
 * JNI entry point: return the native library version as a new Java string.
 */
JNIEXPORT jstring JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerGetVersion(JNIEnv * env, jobject obj)
{
    static const char version[] = "1.0.0";
    return (*env)->NewStringUTF(env, version);
}
#define _ANDROID_APP_
#include
#include
#include
#include
#include
#include
#include "videorender.h"
#ifdef _ANDROID_APP_
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <sys/time.h>
#include
#include <android/log.h>
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "VideoRender", format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, "VideoRender", format, ##__VA_ARGS__)
#endif
#ifdef _IOS_APP_
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#endif
/* Attribute locations for the quad position / texture-coordinate inputs. */
GLint ATTRIB_VERTEX, ATTRIB_TEXTURE;
/* One GL_LUMINANCE texture per YUV plane. */
static GLuint g_texYId;
static GLuint g_texUId;
static GLuint g_texVId;
/* Linked shader program built from VERTEX_SHADER + FRAG_SHADER. */
static GLuint simpleProgram;
/* Screen rectangle recorded by gl_screen_set(). */
static int s_x = 0;
static int s_y = 0;
static int s_width = 0;
static int s_height = 0;
/* Letterboxed viewport computed by gl_viewsize_set() for each frame. */
static int view_x = 0;
static int view_y = 0;
static int view_width = 0;
static int view_height = 0;
/* Nominal image size set by gl_imagesize_set(); not read by the render path shown here. */
int g_width = 1280;
int g_height = 720;
/*
 * Fragment shader: samples the Y, U and V planes from three single-channel
 * (luminance) textures and converts to RGB. GLSL mat3 constructors are
 * column-major, so the three triples below are the weight columns applied to
 * (Y, U-0.5, V-0.5) — standard BT.601 YUV-to-RGB coefficients.
 */
static const char* FRAG_SHADER =
"varying lowp vec2 tc;\n"
"uniform sampler2D SamplerY;\n"
"uniform sampler2D SamplerU;\n"
"uniform sampler2D SamplerV;\n"
"void main(void)\n"
"{\n"
"mediump vec3 yuv;\n"
"lowp vec3 rgb;\n"
"yuv.x = texture2D(SamplerY, tc).r;\n"
"yuv.y = texture2D(SamplerU, tc).r - 0.5;\n"
"yuv.z = texture2D(SamplerV, tc).r - 0.5;\n"
"rgb = mat3( 1, 1, 1,\n"
"0, -0.39465, 2.03211,\n"
"1.13983, -0.58060, 0) * yuv;\n"
"gl_FragColor = vec4(rgb, 1);\n"
"}\n";
/*
 * Vertex shader: pass-through for the full-screen quad; forwards the
 * per-vertex texture coordinate to the fragment shader via `tc`.
 */
static const char* VERTEX_SHADER =
"attribute vec4 vPosition; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 tc; \n"
"void main() \n"
"{ \n"
" gl_Position = vPosition; \n"
" tc = a_texCoord; \n"
"} \n";
/*
 * Upload one plane (w*h single-byte samples in `buffer`) into `texture` as a
 * GL_LUMINANCE image, with linear filtering and edge clamping.
 * Returns the texture id it was given.
 */
static GLuint bindTexture(GLuint texture, const char *buffer, GLuint w , GLuint h)
{
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, buffer);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    return texture;
}
static void renderFrame()
{
static GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
//texture rotate
/*static GLfloat squareVertices[] = {
-1.0f, -0.5f,
0.5f, -1.0f,
-0.5f, 1.0f,
1.0f, 0.5f,
};*/
static GLfloat coordVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
//texture half
/*static GLfloat coordVertices[] = {
0.0f, 1.0f,
0.5f, 1.0f,
0.0f, 0.0f,
0.5f, 0.0f,
};*/
glClearColor(0.0f, 0.0f, 0.0f, 1);
glClear(GL_COLOR_BUFFER_BIT);
GLint tex_y = glGetUniformLocation(simpleProgram, "SamplerY");
GLint tex_u = glGetUniformLocation(simpleProgram, "SamplerU");
GLint tex_v = glGetUniformLocation(simpleProgram, "SamplerV");
//LOGI("tex_y:%d,tex_u:%d,tex_v:%d \n",tex_y,tex_u,tex_v);
ATTRIB_VERTEX = glGetAttribLocation(simpleProgram, "vPosition");
ATTRIB_TEXTURE = glGetAttribLocation(simpleProgram, "a_texCoord");
//LOGI("vertex %d,texture %d",ATTRIB_VERTEX,ATTRIB_TEXTURE);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
glEnableVertexAttribArray(ATTRIB_VERTEX);
//LOGI("enableVertexAttribArray vertex");
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//LOGI("enableVertexAttribArray texture");
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, g_texYId);
glUniform1i(tex_y, 0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, g_texUId);
glUniform1i(tex_u, 1);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, g_texVId);
glUniform1i(tex_v, 2);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
//glutSwapBuffers();//double buffer
//glFlush();//single buffer
}
/*
 * Compile a shader of the given type from `source`.
 * Returns the shader handle on success, 0 on failure (the failed shader
 * object is deleted and its info log is written to the Android log).
 */
static GLuint buildShader(const char* source, GLenum shaderType)
{
    GLuint shaderHandle = glCreateShader(shaderType);
    if (shaderHandle)
    {
        glShaderSource(shaderHandle, 1, &source, 0);
        glCompileShader(shaderHandle);
        GLint compiled = 0;
        glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shaderHandle, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char* buf = (char*) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shaderHandle, infoLen, NULL, buf);
                    LOGE("error::Could not compile shader %d:\n%s\n", shaderType, buf);
                    free(buf);
                }
            }
            /* BUG FIX: the original only deleted/zeroed the handle when an
             * info log existed; with an empty log the failed, nonzero
             * handle was returned as if compilation had succeeded. */
            glDeleteShader(shaderHandle);
            shaderHandle = 0;
        }
    }
    return shaderHandle;
}
/*
 * Compile both shaders and link them into a program.
 * Returns the program handle on success, 0 on failure (the link log, if any,
 * is written to the Android log). Shader objects are always released; a
 * successfully linked program keeps working after its shaders are deleted.
 */
static GLuint buildProgram(const char* vertexShaderSource,
const char* fragmentShaderSource)
{
    GLuint vertexShader = buildShader(vertexShaderSource, GL_VERTEX_SHADER);
    GLuint fragmentShader = buildShader(fragmentShaderSource, GL_FRAGMENT_SHADER);
    GLuint programHandle = 0;
    /* BUG FIX: the original attached the shaders unconditionally, even when
     * buildShader() had failed and returned 0. */
    if (vertexShader && fragmentShader)
    {
        programHandle = glCreateProgram();
        if (programHandle)
        {
            glAttachShader(programHandle, vertexShader);
            glAttachShader(programHandle, fragmentShader);
            glLinkProgram(programHandle);
            GLint linkStatus = GL_FALSE;
            glGetProgramiv(programHandle, GL_LINK_STATUS, &linkStatus);
            if (linkStatus != GL_TRUE) {
                GLint bufLength = 0;
                glGetProgramiv(programHandle, GL_INFO_LOG_LENGTH, &bufLength);
                if (bufLength) {
                    char* buf = (char*) malloc(bufLength);
                    if (buf) {
                        glGetProgramInfoLog(programHandle, bufLength, NULL, buf);
                        LOGE("error::Could not link program:\n%s\n", buf);
                        free(buf);
                    }
                }
                glDeleteProgram(programHandle);
                programHandle = 0;
            }
        }
    }
    /* BUG FIX: the original never deleted the shader objects, leaking two
     * GL objects per program build. glDeleteShader(0) is silently ignored. */
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    return programHandle;
}
void gl_initialize()
{
LOGI("####gl_initialize###\n");
simpleProgram = buildProgram(VERTEX_SHADER, FRAG_SHADER);
if(!simpleProgram){
LOGE("opengl buildProgram() failed! \n");
return;
}
LOGI("glProgram %d\n",simpleProgram);
glUseProgram(simpleProgram);
glGenTextures(1, &g_texYId);
glGenTextures(1, &g_texUId);
glGenTextures(1, &g_texVId);
LOGI("opengl gentextures end");
}
void gl_uninitialize()
{
LOGI("####gl_uninitialize####");
glDeleteProgram(simpleProgram);
glDeleteTextures(1, &g_texYId);
glDeleteTextures(1, &g_texUId);
glDeleteTextures(1, &g_texVId);
}
/*
 * Draw one planar YUV 4:2:0 frame. `buf` holds the planes back to back:
 * Y (w*h bytes), then U (w*h/4), then V (w*h/4). The viewport is recomputed
 * each call so the frame is letterboxed inside the screen rectangle.
 */
void gl_render_frame(const char *buf, int w, int h)
{
    /* BUG FIX: the original only rejected w < 0 / h < 0; a zero dimension
     * slipped through and caused a division by zero in gl_viewsize_set(). */
    if (!buf || w <= 0 || h <= 0)
    {
        LOGE("this frame is invalid \n");
        return;
    }
    char *y_buf = (char *)buf;
    char *u_buf = y_buf + w * h;
    char *v_buf = u_buf + w * h / 4;
    gl_viewsize_set(w, h);
    glViewport(view_x, view_y, view_width, view_height);
    /* Chroma planes are uploaded at quarter resolution (half width, half height). */
    bindTexture(g_texYId, y_buf, w, h);
    bindTexture(g_texUId, u_buf, w / 2, h / 2);
    bindTexture(g_texVId, v_buf, w / 2, h / 2);
    renderFrame();
}
/*
 * Record the output rectangle of the GL surface (origin and size, pixels).
 * gl_viewsize_set() later letterboxes each frame inside this rectangle.
 */
void gl_screen_set(int screen_x, int screen_y, int screen_width, int screen_height)
{
    s_height = screen_height;
    s_width = screen_width;
    s_y = screen_y;
    s_x = screen_x;
}
/*
 * Compute the letterboxed viewport (view_x/y/width/height) for a frame of
 * the given size inside the screen rectangle set by gl_screen_set().
 * Aspect ratios are compared as integer percentages (height*100/width).
 */
void gl_viewsize_set(int frame_width, int frame_height)
{
    /* BUG FIX: the original divided by frame_width and s_width without any
     * guard — a zero frame or an unset screen caused a division by zero. */
    if (frame_width <= 0 || frame_height <= 0 || s_width <= 0 || s_height <= 0)
        return;
    int view_p = (int)((float)frame_height * 100 / frame_width);
    int screen_p = (int)((float)s_height * 100 / s_width);
    if (view_p == screen_p) {
        /* Same aspect ratio: fill the whole screen rectangle. */
        view_x = s_x;
        view_y = s_y;
        view_width = s_width;
        view_height = s_height;
    }
    else if (view_p > screen_p) {
        /* Frame is taller than the screen: full height, centered horizontally. */
        view_width = (int)(s_height * 100 / view_p);
        view_height = s_height;
        view_x = (int)((s_width - view_width) / 2);
        view_y = s_y;
    }
    else {
        /* Frame is wider than the screen: full width, centered vertically. */
        view_width = s_width;
        view_height = (int)(s_width * view_p / 100);
        view_x = s_x;
        view_y = (int)((s_height - view_height) / 2);
    }
}
/*
 * Record the nominal source image size. The values are stored in
 * g_width/g_height and are not read by the render path shown here.
 */
void gl_imagesize_set(int width, int height)
{
    g_height = height;
    g_width = width;
}
// Java binding class declaring the native player entry points implemented in the JNI code above.
// NOTE(review): no `static { System.loadLibrary("..."); }` block is visible here — presumably one
// is required before any of these natives can be called; confirm the native library name.
public class FirstOpenGLProjectJNI {
// Configure the render surface size; also initializes GL and the decoder natively.
public static native int AeePlayerSetScreen(int width,int height);
// Open the stream at `url`; returns 0 on success, -1 on failure.
public static native int AeePlayerStart(String url);
// Decode and draw one frame; call from the GL render thread.
public static native int AeePlayerRender();
// Close the stream and release decoder/GL resources.
public static native int AeePlayerStop();
}I want to use
GLSurfaceView
to play video, but the surface is always black. How can I show the video in the GLSurfaceView by using this JNI code?