Advanced search

Media (91)

Other articles (56)

  • The farm's recurring Cron tasks

    1 December 2010, by

    Managing the farm relies on running several repetitive tasks, known as Cron tasks, at regular intervals.
    The super Cron (gestion_mutu_super_cron)
    This task, scheduled every minute, simply calls the Cron of every instance of the shared installation on a regular basis. Combined with a system Cron on the central site of the shared installation, this is enough to generate regular visits to the different sites and to keep the tasks of rarely visited sites from being too (...)
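    As a rough illustration of this mechanism, the central trigger amounts to requesting each site's cron URL in turn. The sketch below is an assumption-laden illustration, not SPIP code: it supposes each site exposes a plain HTTP cron endpoint, whose exact URL is not given in this excerpt.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.List;

    // Hypothetical "super cron": visit every site of the farm so that
    // rarely visited sites still get their periodic tasks executed.
    public class SuperCron {
        public static void ping(List<String> cronUrls) {
            for (String address : cronUrls) {
                try {
                    HttpURLConnection c = (HttpURLConnection) new URL(address).openConnection();
                    c.setConnectTimeout(5000);
                    c.getResponseCode();   // the visit itself triggers the site's cron
                    c.disconnect();
                } catch (Exception e) {
                    // one unreachable site must not block the rest of the farm
                }
            }
        }
    }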

  • Publishing on MediaSPIP

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, provided your MediaSPIP installation is at version 0.2 or higher. If in doubt, contact the administrator of your MediaSPIP to find out.

  • The plugin: Podcasts

    14 July 2010, by

    The podcasting problem is, once again, a problem that reveals how data transport on the Internet is being standardised.
    Two interesting formats exist: the one developed by Apple, heavily geared towards iTunes, whose SPEC is here; and the "Media RSS Module" format, which is more "open" and is notably backed by Yahoo and the Miro software.
    File types supported in the feeds
    Apple's format only allows the following formats in its feeds: .mp3 audio/mpeg .m4a audio/x-m4a .mp4 (...)

On other sites (6304)

  • How can I use this JNI code to play video in a GLSurfaceView? I cannot find the way to use it

    2 May 2016, by MrHuang
    #include <string.h>        /* memcpy/memset */
    #include <sys/time.h>

    #include "AeeVideo.h"
    #include "videorender.h"
    #include "decode.h"


    #include <jni.h>
    #include <android/log.h>
    #define TAG "AeeVideo"
    #define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, TAG, format, ##__VA_ARGS__)
    #define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  TAG, format, ##__VA_ARGS__)

    static int g_connectstate = -1;
    static DecodeCtx *g_dec_ctx;

    static int     last_width   = 0;
    static int     last_height  = 0;
    static int     last_picsize = 0;
    static uint8_t last_picture[3 * 1024 * 1024];   /* last picture: Y plane plus neutral (grey) chroma */

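    /* Called from Java once the GL surface exists: remember the screen
       rectangle, build the GL program and textures, and initialise the decoder. */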
    JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetScreen(JNIEnv * env, jobject obj,jint width,jint height)
    {
       gl_screen_set(0,  0, width, height);
       LOGI("and opengl set screen size (%d,%d,%d,%d)\n",0,0,width,height);

       gl_initialize();
       Decode_Init();
       return 0;
    }

    JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStart(JNIEnv * env, jobject obj, jstring Url)
    {
       const char *pUrl = (*env)->GetStringUTFChars(env, Url, 0);
       LOGI("stream url %s \n",pUrl);

       g_dec_ctx = Decode_OpenStream((char*)pUrl);
       if (!g_dec_ctx) {
           LOGE("openstream %s,failed!\n", pUrl);
           return -1;
       }
       return 0;
    }

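    /* Called for each frame drawn by the renderer: read one decoded frame and
       display it; if reading fails, re-display the last saved (greyscale) picture. */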
    JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerRender(JNIEnv * env, jobject obj)
    {
       if (!g_dec_ctx)
           return -1;

       VideoFrame frame;
       int ret = Decode_ReadFrame(g_dec_ctx, &frame);
       if (ret <= 0) {
           if (last_picsize > 0) {
               LOGI("disconnect, render last pic\n");
               gl_render_frame((const char *)last_picture, last_width, last_height);
           }
           return ret;
       }
       LOGI("render video frame,pkt w,h:(%d,%d)\n",frame.width,frame.height);
       gl_render_frame(frame.data, frame.width, frame.height);

       if (last_width != frame.width || last_height != frame.height){
           memset(last_picture, 0x80, sizeof(last_picture));       /* gray background */
       }
       last_width   = frame.width;
       last_height  = frame.height;
       last_picsize = frame.size;
       memcpy(last_picture, frame.data, frame.width * frame.height);   /* keep only the Y plane; the chroma area stays neutral grey */
       return 0;
    }


    JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerStop(JNIEnv * env, jobject obj)
    {
       LOGI("AeePlayer Stop");
       if (g_dec_ctx){
       Decode_CloseStream(g_dec_ctx);
       }
       Decode_Quit();
       gl_uninitialize();
       return 0;
    }

    JNIEXPORT jint JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerSetState(JNIEnv * env, jobject obj, jint state)
    {
       g_connectstate = state;
       LOGI("g_connectstate %d \n",g_connectstate);
       return 0;
    }

    JNIEXPORT jstring JNICALL Java_com_aee_video_FirstOpenGLProjectJNI_AeePlayerGetVersion(JNIEnv * env, jobject obj)
    {
       char v[10]= "1.0.0";
       return (*env)->NewStringUTF(env,v);
    }

    #define _ANDROID_APP_

    #include <stdlib.h>        /* malloc/free for the shader and program info logs */
    #include "videorender.h"

    #ifdef _ANDROID_APP_
    #include <GLES2/gl2.h>
    #include <GLES2/gl2ext.h>
    #include <sys/time.h>
    #include <android/log.h>
    #define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "VideoRender", format, ##__VA_ARGS__)
    #define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "VideoRender", format, ##__VA_ARGS__)
    #endif

    #ifdef _IOS_APP_
    #include <OpenGLES/ES2/gl.h>
    #include <OpenGLES/ES2/glext.h>
    #endif

    GLint ATTRIB_VERTEX, ATTRIB_TEXTURE;

    static GLuint g_texYId;
    static GLuint g_texUId;
    static GLuint g_texVId;
    static GLuint simpleProgram;

    static int s_x = 0;
    static int s_y = 0;
    static int s_width = 0;
    static int s_height = 0;

    static int view_x = 0;  
    static int view_y = 0;
    static int view_width = 0;
    static int view_height = 0;

    int g_width = 1280;  
    int g_height = 720;


    static const char* FRAG_SHADER =
       "varying lowp vec2 tc;\n"
       "uniform sampler2D SamplerY;\n"
       "uniform sampler2D SamplerU;\n"
       "uniform sampler2D SamplerV;\n"
       "void main(void)\n"
       "{\n"
           "mediump vec3 yuv;\n"
           "lowp vec3 rgb;\n"
           "yuv.x = texture2D(SamplerY, tc).r;\n"
           "yuv.y = texture2D(SamplerU, tc).r - 0.5;\n"
           "yuv.z = texture2D(SamplerV, tc).r - 0.5;\n"
           "rgb = mat3( 1,   1,   1,\n"
                       "0,       -0.39465,  2.03211,\n"
                       "1.13983,   -0.58060,  0) * yuv;\n"
           "gl_FragColor = vec4(rgb, 1);\n"
       "}\n";

    static const char* VERTEX_SHADER =
         "attribute vec4 vPosition;    \n"
         "attribute vec2 a_texCoord;   \n"
         "varying vec2 tc;     \n"
         "void main()                  \n"
         "{                            \n"
         "   gl_Position = vPosition;  \n"
         "   tc = a_texCoord;  \n"
         "}                            \n";

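    /* Upload one 8-bit plane (Y, U or V) as a GL_LUMINANCE texture. */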
    static GLuint bindTexture(GLuint texture, const char *buffer, GLuint w , GLuint h)
    {
       glBindTexture ( GL_TEXTURE_2D, texture );
       glTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, w, h, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buffer);
       glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
       glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
       glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
       glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );

       return texture;
    }

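    /* Draw a full-screen quad, sampling the Y/U/V planes bound to texture
       units 0/1/2 through the YUV-to-RGB fragment shader. */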
    static void renderFrame()
    {
       static GLfloat squareVertices[] = {
           -1.0f, -1.0f,
           1.0f, -1.0f,
           -1.0f,  1.0f,
           1.0f,  1.0f,
       };

       //texture rotate
       /*static GLfloat squareVertices[] = {
           -1.0f, -0.5f,
           0.5f, -1.0f,
           -0.5f,  1.0f,
           1.0f,  0.5f,
       };*/

       static GLfloat coordVertices[] = {
           0.0f, 1.0f,
           1.0f, 1.0f,
           0.0f,  0.0f,
           1.0f,  0.0f,
       };
       //texture half
       /*static GLfloat coordVertices[] = {
           0.0f, 1.0f,
           0.5f, 1.0f,
           0.0f,  0.0f,
           0.5f,  0.0f,
       };*/
       glClearColor(0.0f, 0.0f, 0.0f, 1);
       glClear(GL_COLOR_BUFFER_BIT);

       GLint tex_y = glGetUniformLocation(simpleProgram, "SamplerY");
       GLint tex_u = glGetUniformLocation(simpleProgram, "SamplerU");
       GLint tex_v = glGetUniformLocation(simpleProgram, "SamplerV");
       //LOGI("tex_y:%d,tex_u:%d,tex_v:%d \n",tex_y,tex_u,tex_v);

       ATTRIB_VERTEX = glGetAttribLocation(simpleProgram, "vPosition");
       ATTRIB_TEXTURE = glGetAttribLocation(simpleProgram, "a_texCoord");
       //LOGI("vertex %d,texture %d",ATTRIB_VERTEX,ATTRIB_TEXTURE);
       glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
       glEnableVertexAttribArray(ATTRIB_VERTEX);
       //LOGI("enableVertexAttribArray vertex");

       glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
       glEnableVertexAttribArray(ATTRIB_TEXTURE);
       //LOGI("enableVertexAttribArray texture");

       glActiveTexture(GL_TEXTURE0);
       glBindTexture(GL_TEXTURE_2D, g_texYId);
       glUniform1i(tex_y, 0);

       glActiveTexture(GL_TEXTURE1);
       glBindTexture(GL_TEXTURE_2D, g_texUId);
       glUniform1i(tex_u, 1);

       glActiveTexture(GL_TEXTURE2);
       glBindTexture(GL_TEXTURE_2D, g_texVId);
       glUniform1i(tex_v, 2);

       glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
       //glutSwapBuffers();//double buffer
       //glFlush();//single buffer
    }

    static GLuint buildShader(const char* source, GLenum shaderType)
    {
       GLuint shaderHandle = glCreateShader(shaderType);

       if (shaderHandle)
       {
           glShaderSource(shaderHandle, 1, &source, 0);
           glCompileShader(shaderHandle);
           GLint compiled = 0;
           glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compiled);
           if (!compiled){
               GLint infoLen = 0;
               glGetShaderiv(shaderHandle, GL_INFO_LOG_LENGTH, &infoLen);
               if (infoLen){
                   char* buf = (char*) malloc(infoLen);
                   if (buf){
                       glGetShaderInfoLog(shaderHandle, infoLen, NULL, buf);
                       LOGE("error::Could not compile shader %d:\n%s\n", shaderType, buf);
                       free(buf);
                   }
                   glDeleteShader(shaderHandle);
                   shaderHandle = 0;
               }
           }
       }

       return shaderHandle;
    }

    static GLuint buildProgram(const char* vertexShaderSource,
           const char* fragmentShaderSource)
    {
       GLuint vertexShader = buildShader(vertexShaderSource, GL_VERTEX_SHADER);
       GLuint fragmentShader = buildShader(fragmentShaderSource, GL_FRAGMENT_SHADER);
       GLuint programHandle = glCreateProgram();

       if (programHandle)
       {
           glAttachShader(programHandle, vertexShader);
           glAttachShader(programHandle, fragmentShader);
           glLinkProgram(programHandle);
           GLint linkStatus = GL_FALSE;
           glGetProgramiv(programHandle, GL_LINK_STATUS, &linkStatus);
           if (linkStatus != GL_TRUE) {
               GLint bufLength = 0;
               glGetProgramiv(programHandle, GL_INFO_LOG_LENGTH, &bufLength);
               if (bufLength) {
                   char* buf = (char*) malloc(bufLength);
                   if (buf) {
                       glGetProgramInfoLog(programHandle, bufLength, NULL, buf);
                       LOGE("error::Could not link program:\n%s\n", buf);
                       free(buf);
                   }
               }
               glDeleteProgram(programHandle);
               programHandle = 0;
           }
       }

       return programHandle;
    }

    void gl_initialize()
    {
       LOGI("####gl_initialize###\n");
       simpleProgram = buildProgram(VERTEX_SHADER, FRAG_SHADER);
       if(!simpleProgram){
           LOGE("opengl buildProgram() failed! \n");
           return;
       }
       LOGI("glProgram %d\n",simpleProgram);
       glUseProgram(simpleProgram);
       glGenTextures(1, &g_texYId);
       glGenTextures(1, &g_texUId);
       glGenTextures(1, &g_texVId);
       LOGI("opengl gentextures end");
    }

    void gl_uninitialize()
    {
       LOGI("####gl_uninitialize####");
       glDeleteProgram(simpleProgram);
       glDeleteTextures(1, &g_texYId);
       glDeleteTextures(1, &g_texUId);
       glDeleteTextures(1, &g_texVId);
    }

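    /* Split a packed YUV420p buffer into its three planes and draw them,
       fitted to the current screen rectangle. */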
    void gl_render_frame(const char *buf, int w, int h)
    {
       if (!buf || w < 0 || h < 0)
       {
           LOGE("this frame is invalid \n");
           return;
       }
       char *y_buf = (char *)buf;
       char *u_buf = y_buf + w * h;
       char *v_buf = u_buf + w * h / 4;
       gl_viewsize_set(w, h);
       glViewport(view_x, view_y, view_width, view_height);
       //LOGI("glViewport x,y,width,height=[%d,%d,%d,%d]\n",view_x,view_y,view_width,view_height);
       bindTexture(g_texYId, (const char*)y_buf, w, h);
       bindTexture(g_texUId, (const char*)u_buf, w/2, h/2);
       bindTexture(g_texVId, (const char*)v_buf, w/2, h/2);
       renderFrame();
    }

    void gl_screen_set(int screen_x, int screen_y, int screen_width, int screen_height)
    {
       s_x = screen_x;
       s_y = screen_y;
       s_width = screen_width;
       s_height = screen_height;
    }

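    /* Fit the frame into the screen rectangle while keeping its aspect ratio
       (ratios are scaled by 100 to stay in integer arithmetic). */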
    void gl_viewsize_set(int frame_width, int frame_height)
    {
       int view_p = (int)((float)frame_height * 100 / frame_width);
       int screen_p = (int)((float)s_height * 100 / s_width);
       if (view_p == screen_p) {
           view_x = s_x;
           view_y = s_y;
           view_width = s_width;
           view_height = s_height;
       }
       else if (view_p > screen_p){
           view_width = (int)(s_height * 100 / view_p);
           view_height = s_height;
           view_x = (int)((s_width - view_width) / 2);
           view_y = s_y;
       }
       else if (view_p < screen_p){
           view_width = s_width;
           view_height = (int)(s_width * view_p / 100) ;
           view_x = s_x;
           view_y = (int)((s_height - view_height) / 2);
       }
    }

    void gl_imagesize_set(int width, int height)
    {
       g_width = width;
       g_height = height;
    }

    public class FirstOpenGLProjectJNI {

        public static native int AeePlayerSetScreen(int width,int height);

        public static native int AeePlayerStart(String url);

        public static native int AeePlayerRender();

        public static native int AeePlayerStop();

    }

    I want to use a GLSurfaceView to play video, but the surface is always black. How can I show the video in the GLSurfaceView using this JNI code?
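
    For reference, here is a minimal sketch of how these natives could be driven from a GLSurfaceView.Renderer. It is an illustration only: the class name VideoRenderer, the library name passed to System.loadLibrary and the way the stream URL is supplied are assumptions, not part of the code above.

    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;
    import android.opengl.GLSurfaceView;

    public class VideoRenderer implements GLSurfaceView.Renderer {
        static {
            System.loadLibrary("aeevideo");   // assumed name of the native library
        }

        private final String url;

        public VideoRenderer(String url) { this.url = url; }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Nothing to do yet: the GL objects are created in AeePlayerSetScreen().
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            // Build shaders/textures, initialise the decoder and open the stream
            // on the GL thread, once the surface size is known.
            FirstOpenGLProjectJNI.AeePlayerSetScreen(width, height);
            FirstOpenGLProjectJNI.AeePlayerStart(url);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Decode and draw one frame per render pass.
            FirstOpenGLProjectJNI.AeePlayerRender();
        }
    }

    The GLSurfaceView would need setEGLContextClientVersion(2) before setRenderer(new VideoRenderer(url)); without an OpenGL ES 2 context the shader program built in gl_initialize() fails to compile, which is one common cause of a permanently black surface. AeePlayerStop() belongs in the activity's teardown.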

  • Use ffmpeg in Android Studio 2.0: cannot find file

    29 April 2016, by kemp

    I successfully generated the android folder with the arm/lib and arm/include files, like this:
    [screenshot]

    I then created one Android.mk file in /Users/whyus/Library/Android/sdk/ndk-bundle/sources/ffmpeg/android/arm and one Android.mk in my Android project (/Users/whyus/Downloads/FFmpeg/app/build/intermediates/ndk/debug/), and my Gradle configuration looks like this:
    [screenshot]

    But when I build the C file, it cannot find the include headers.

  • ffmpeg unable to find encoder 'libvorbis'

    15 July 2014, by scientiffic

    I'm unable to transcode a video file to webm because my machine isn't able to find libvorbis. When I run the following command:

    ffmpeg -y -i /public/uploads/tmp/1382112521-11953-7661/webm_Untitled.mov -vcodec libvpx -acodec libvorbis -s 640x360  -b 1500k -ab 160000 -f webm -g 30 -aspect 1.7777777777777777 /public/uploads/tmp/1382112521-11953-7661/tmpfile.webm

    I get the error

    Unknown encoder 'libvorbis'

    I already have libvorbis installed on my machine, though (when I run "brew install libvorbis", it tells me: Warning: libvorbis-1.3.3 already installed).

    How can I solve this problem?

    Here is the full log:

    ffmpeg -y -i /public/uploads/tmp/1382112521-11953-7661/webm_Untitled.mov -vcodec libvpx -acodec libvorbis -s 640x360  -b 1500k -ab 160000 -f webm -g 30 -aspect 1.7777777777777777 /public/uploads/tmp/1382112521-11953-7661/tmpfile.webm
    ffmpeg version 1.2.1 Copyright (c) 2000-2013 the FFmpeg developers
     built on Oct 18 2013 12:20:19 with Apple LLVM version 4.2 (clang-425.0.28) (based on LLVM 3.2svn)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/1.2.1 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-nonfree --enable-hardcoded-tables --enable-avresample --enable-vda --cc=cc --host-cflags= --host-ldflags= --enable-libx264 --enable-libfaac --enable-libmp3lame --enable-libxvid --enable-libvpx
     libavutil      52. 18.100 / 52. 18.100
     libavcodec     54. 92.100 / 54. 92.100
     libavformat    54. 63.104 / 54. 63.104
     libavdevice    54.  3.103 / 54.  3.103
     libavfilter     3. 42.103 /  3. 42.103
     libswscale      2.  2.100 /  2.  2.100
     libswresample   0. 17.102 /  0. 17.102
     libpostproc    52.  2.100 / 52.  2.100
    Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/public/uploads/tmp/1382112521-11953-7661/webm_Untitled.mov':
     Metadata:
       major_brand     : qt  
       minor_version   : 0
       compatible_brands: qt  
       creation_time   : 2013-09-27 20:08:57
     Duration: 00:00:02.14, start: 0.000000, bitrate: 17460 kb/s
       Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 1920x1080, 17292 kb/s, 29.88 fps, 29.83 tbr, 90k tbn, 180k tbc
       Metadata:
         creation_time   : 2013-09-27 20:08:57
         handler_name    : Core Media Data Handler
       Stream #0:1(und): Audio: aac (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 123 kb/s
       Metadata:
         creation_time   : 2013-09-27 20:08:57
         handler_name    : Core Media Data Handler
    Please use -b:a or -b:v, -b is ambiguous
    Unknown encoder 'libvorbis'