Recherche avancée

Médias (1)

Mot : - Tags -/biomaping

Autres articles (111)

  • MediaSPIP version 0.1 Beta

    16 avril 2011, par

    MediaSPIP 0.1 beta est la première version de MediaSPIP décrétée comme "utilisable".
    Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
    Pour avoir une installation fonctionnelle, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
    Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...)

  • MediaSPIP 0.1 Beta version

    25 avril 2011, par

    MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...)

  • Amélioration de la version de base

    13 septembre 2013

    Jolie sélection multiple
    Le plugin Chosen permet d’améliorer l’ergonomie des champs de sélection multiple. Voir les deux images suivantes pour comparer.
    Il suffit pour cela d’activer le plugin Chosen (Configuration générale du site > Gestion des plugins), puis de configurer le plugin (Les squelettes > Chosen) en activant l’utilisation de Chosen dans le site public et en spécifiant les éléments de formulaires à améliorer, par exemple select[multiple] pour les listes à sélection multiple (...)

Sur d’autres sites (4915)

  • openGL ES 2.0 on android , YUV to RGB and Rendering with ffMpeg

    14 octobre 2013, par 101110101100111111101101

    My renderer dies one or two frames after the video starts to show.

    FATAL ERROR 11 : blabla...(Exactly occurs in glDrawElements (Y part))

    I think problem is 'glPixelStorei' or 'GL_RGB', 'GL_LUMINANCE' but.. I don't get it.

    My rendering way :

    1. Decode data that got from network, (SDK Getting-> NDK Decoding), Enqueueing.

    2. Dequeueing another threads (of course synchronized) get ready to setup OpenGL ES 2.0.(SDK)

    3. When onDrawFrame, onSurfaceCreated, onSurfaceChanged methods are called, it shrink down to NDK. (My Renderer source in NDK will attach below.)

    4. Rendering.

    As you know, Fragment shader is using for conversion.
    My Data is YUV 420p (pix_fmt_YUV420p) (12bit per pixel)

    Here is my entire source.

    I haven't any knowledge about OpenGL ES before, this is first time.

    Please let me know what I can do to improve performance.

    And which parameters should I use in 'glTexImage2D', 'glTexSubImage2D' and 'glRenderbufferStorage'?
    GL_LUMINANCE ? GL_RGBA ? GL_RGB ? (GL_LUMINANCE is using now)

    void Renderer::set_draw_frame(JNIEnv* jenv, jbyteArray yData, jbyteArray uData, jbyteArray vData)
    {
       for (int i = 0; i < 3; i++) {
           if (yuv_data_[i] != NULL) {
               free(yuv_data_[i]);
           }
       }

     int YSIZE = -1;
     int USIZE = -1;
     int VSIZE = -1;

     if (yData != NULL) {
           YSIZE = (int)jenv->GetArrayLength(yData);
       LOG_DEBUG("YSIZE : %d", YSIZE);
           yuv_data_[0] = (unsigned char*)malloc(sizeof(unsigned char) * YSIZE);
       memset(yuv_data_[0], 0, YSIZE);
           jenv->GetByteArrayRegion(yData, 0, YSIZE, (jbyte*)yuv_data_[0]);
       yuv_data_[0] = reinterpret_cast<unsigned>(yuv_data_[0]);
       } else {
           YSIZE = (int)jenv->GetArrayLength(yData);
           yuv_data_[0] = (unsigned char*)malloc(sizeof(unsigned char) * YSIZE);
       memset(yuv_data_[0], 1, YSIZE);
     }

       if (uData != NULL) {
           USIZE = (int)jenv->GetArrayLength(uData);
       LOG_DEBUG("USIZE : %d", USIZE);
           yuv_data_[1] = (unsigned char*)malloc(sizeof(unsigned char) * USIZE);
       memset(yuv_data_[1], 0, USIZE);
           jenv->GetByteArrayRegion(uData, 0, USIZE, (jbyte*)yuv_data_[1]);
       yuv_data_[1] = reinterpret_cast<unsigned>(yuv_data_[1]);
       } else {
           USIZE = YSIZE/4;
           yuv_data_[1] = (unsigned char*)malloc(sizeof(unsigned char) * USIZE);
       memset(yuv_data_[1], 1, USIZE);
     }

       if (vData != NULL) {
           VSIZE = (int)jenv->GetArrayLength(vData);
       LOG_DEBUG("VSIZE : %d", VSIZE);
           yuv_data_[2] = (unsigned char*)malloc(sizeof(unsigned char) * VSIZE);
       memset(yuv_data_[2], 0, VSIZE);
           jenv->GetByteArrayRegion(vData, 0, VSIZE, (jbyte*)yuv_data_[2]);
       yuv_data_[2] = reinterpret_cast<unsigned>(yuv_data_[2]);
       } else {
           VSIZE = YSIZE/4;
           yuv_data_[2] = (unsigned char*)malloc(sizeof(unsigned char) * VSIZE);
       memset(yuv_data_[2], 1, VSIZE);
     }

       glClearColor(1.0F, 1.0F, 1.0F, 1.0F);
       check_gl_error("glClearColor");
       glClear(GL_COLOR_BUFFER_BIT);
       check_gl_error("glClear");
    }

    void Renderer::draw_frame()
    {
     // Binding created FBO
     glBindFramebuffer(GL_FRAMEBUFFER, frame_buffer_object_);
     check_gl_error("glBindFramebuffer");
       // Add program to OpenGL environment
       glUseProgram(program_object_);
       check_gl_error("glUseProgram");

     for (int i = 0; i &lt; 3; i++) {
       LOG_DEBUG("Success");
         //Bind texture
         glActiveTexture(GL_TEXTURE0 + i);
         check_gl_error("glActiveTexture");
         glBindTexture(GL_TEXTURE_2D, yuv_texture_id_[i]);
         check_gl_error("glBindTexture");
         glUniform1i(yuv_texture_object_[i], i);
         check_gl_error("glBindTexture");
       glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_yuv_width_[i], stream_yuv_height_[i], GL_RGBA, GL_UNSIGNED_BYTE, yuv_data_[i]);
         check_gl_error("glTexSubImage2D");
     }

     LOG_DEBUG("Success");
       // Load vertex information
       glVertexAttribPointer(position_object_, 2, GL_FLOAT, GL_FALSE, kStride, kVertexInformation);
       check_gl_error("glVertexAttribPointer");
       // Load texture information
       glVertexAttribPointer(texture_position_object_, 2, GL_SHORT, GL_FALSE, kStride, kTextureCoordinateInformation);
       check_gl_error("glVertexAttribPointer");

    LOG_DEBUG("9");
       glEnableVertexAttribArray(position_object_);
       check_gl_error("glEnableVertexAttribArray");
       glEnableVertexAttribArray(texture_position_object_);
       check_gl_error("glEnableVertexAttribArray");

     // Back to window buffer
     glBindFramebuffer(GL_FRAMEBUFFER, 0);
     check_gl_error("glBindFramebuffer");
     LOG_DEBUG("Success");
       // Draw the Square
       glDrawElements(GL_TRIANGLE_STRIP, 6, GL_UNSIGNED_SHORT, kIndicesInformation);
       check_gl_error("glDrawElements");
    }

    void Renderer::setup_render_to_texture()
    {
       glGenFramebuffers(1, &amp;frame_buffer_object_);
       check_gl_error("glGenFramebuffers");
       glBindFramebuffer(GL_FRAMEBUFFER, frame_buffer_object_);
       check_gl_error("glBindFramebuffer");
       glGenRenderbuffers(1, &amp;render_buffer_object_);
       check_gl_error("glGenRenderbuffers");
       glBindRenderbuffer(GL_RENDERBUFFER, render_buffer_object_);
       check_gl_error("glBindRenderbuffer");
       glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, stream_yuv_width_[0], stream_yuv_height_[0]);
       check_gl_error("glRenderbufferStorage");
       glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, render_buffer_object_);
       check_gl_error("glFramebufferRenderbuffer");
     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[0], 0);
       check_gl_error("glFramebufferTexture2D");  
     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[1], 0);
       check_gl_error("glFramebufferTexture2D");  
     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[2], 0);
       check_gl_error("glFramebufferTexture2D");  

     glBindFramebuffer(GL_FRAMEBUFFER, 0);
       check_gl_error("glBindFramebuffer");

       GLint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
       if (status != GL_FRAMEBUFFER_COMPLETE) {
           print_log("renderer.cpp", "setup_graphics", "FBO setting fault.", LOGERROR);
           LOG_ERROR("%d\n", status);
           return;
       }
    }

    void Renderer::setup_yuv_texture()
    {
       // Use tightly packed data
       glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
       check_gl_error("glPixelStorei");

     for (int i = 0; i &lt; 3; i++) {
       if (yuv_texture_id_[i]) {
         glDeleteTextures(1, &amp;yuv_texture_id_[i]);
         check_gl_error("glDeleteTextures");
       }
         glActiveTexture(GL_TEXTURE0+i);
         check_gl_error("glActiveTexture");
         // Generate texture object
         glGenTextures(1, &amp;yuv_texture_id_[i]);
         check_gl_error("glGenTextures");
         glBindTexture(GL_TEXTURE_2D, yuv_texture_id_[i]);
         check_gl_error("glBindTexture");
       glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
         check_gl_error("glTexParameteri");
         glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
         check_gl_error("glTexParameteri");
         glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
         check_gl_error("glTexParameterf");
         glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
         check_gl_error("glTexParameterf");
       glEnable(GL_TEXTURE_2D);
       check_gl_error("glEnable");
       glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, maximum_yuv_width_[i], maximum_yuv_height_[i], 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
         check_gl_error("glTexImage2D");
     }
    }

    // One-time GL setup: builds the shader program, resolves the attribute
    // and sampler locations, creates the YUV textures and the FBO, and sets
    // the viewport to the stream's Y-plane dimensions.
    void Renderer::setup_graphics()
    {
       // Log driver identification strings for debugging.
       print_gl_string("Version", GL_VERSION);
       print_gl_string("Vendor", GL_VENDOR);
       print_gl_string("Renderer", GL_RENDERER);
       print_gl_string("Extensions", GL_EXTENSIONS);

       program_object_ = create_program(kVertexShader, kFragmentShader);
       if (!program_object_) {
           print_log("renderer.cpp", "setup_graphics", "Could not create program.", LOGERROR);
           return;
       }

       // Vertex attributes declared in kVertexShader.
       position_object_ = glGetAttribLocation(program_object_, "vPosition");
       check_gl_error("glGetAttribLocation");
       texture_position_object_ = glGetAttribLocation(program_object_, "vTexCoord");
       check_gl_error("glGetAttribLocation");

       // Sampler uniforms for the Y, U and V planes (units 0, 1, 2 at draw time).
       yuv_texture_object_[0] = glGetUniformLocation(program_object_, "yTexture");
       check_gl_error("glGetUniformLocation");
     yuv_texture_object_[1] = glGetUniformLocation(program_object_, "uTexture");
       check_gl_error("glGetUniformLocation");
       yuv_texture_object_[2] = glGetUniformLocation(program_object_, "vTexture");
       check_gl_error("glGetUniformLocation");

     // Textures must exist before setup_render_to_texture() attaches them.
     setup_yuv_texture();
       setup_render_to_texture();

     glViewport(0, 0, stream_yuv_width_[0], stream_yuv_height_[0]);//736, 480);//1920, 1080);//maximum_yuv_width_[0], maximum_yuv_height_[0]);
     check_gl_error("glViewport");
    }

    GLuint Renderer::create_program(const char* vertex_source, const char* fragment_source)
    {
       GLuint vertexShader = load_shader(GL_VERTEX_SHADER, vertex_source);
       if (!vertexShader) {
           return 0;
       }

       GLuint pixelShader = load_shader(GL_FRAGMENT_SHADER, fragment_source);
       if (!pixelShader) {
           return 0;
       }

       GLuint program = glCreateProgram();
       if (program) {
           glAttachShader(program, vertexShader);
           check_gl_error("glAttachShader");
           glAttachShader(program, pixelShader);
           check_gl_error("glAttachShader");
           glLinkProgram(program);
           /* Get a Status */
           GLint linkStatus = GL_FALSE;
           glGetProgramiv(program, GL_LINK_STATUS, &amp;linkStatus);
           if (linkStatus != GL_TRUE) {
               GLint bufLength = 0;
               glGetProgramiv(program, GL_INFO_LOG_LENGTH, &amp;bufLength);
               if (bufLength) {
                   char* buf = (char*) malloc(bufLength);
                   if (buf) {
                       glGetProgramInfoLog(program, bufLength, NULL, buf);
                       print_log("renderer.cpp", "create_program", "Could not link program.", LOGERROR);
                       LOG_ERROR("%s\n", buf);
                       free(buf);
                   }
               }
               glDeleteProgram(program);
               program = 0;
           }
       }
       return program;
    }

    GLuint Renderer::load_shader(GLenum shaderType, const char* pSource)
    {
       GLuint shader = glCreateShader(shaderType);
           if (shader) {
               glShaderSource(shader, 1, &amp;pSource, NULL);
               glCompileShader(shader);
               /* Get a Status */
               GLint compiled = 0;
               glGetShaderiv(shader, GL_COMPILE_STATUS, &amp;compiled);
               if (!compiled) {
                   GLint infoLen = 0;
                   glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &amp;infoLen);
                   if (infoLen) {
                       char* buf = (char*) malloc(infoLen);
                       if (buf) {
                           glGetShaderInfoLog(shader, infoLen, NULL, buf);
                           print_log("renderer.cpp", "load_shader", "Could not link program.", LOGERROR);
                                     LOG_ERROR("%d :: %s\n", shaderType, buf);
                           free(buf);
                       }
                       glDeleteShader(shader);
                       shader = 0;
                   }
               }
           }
       return shader;
    }


    // Per-frame entry point called from the Java GLSurfaceView renderer:
    // stage the new YUV planes, then render them.
    void Renderer::onDrawFrame(JNIEnv* jenv, jbyteArray yData, jbyteArray uData, jbyteArray vData)
    {
        set_draw_frame(jenv, yData, uData, vData);
        draw_frame();
    }

    void Renderer::setSize(int stream_width, int stream_height) {
     stream_yuv_width_[0] = stream_width;
     stream_yuv_width_[1] = stream_width/2;
     stream_yuv_width_[2] = stream_width/2;
     stream_yuv_height_[0] = stream_height;
     stream_yuv_height_[1] = stream_height/2;
     stream_yuv_height_[2] = stream_height/2;
    }

    // Called when the GL surface is (re)sized: records the view dimensions,
    // fixes the maximum texture dimensions at 1080p, forces the stream size
    // to 736x480, and reruns the full GL setup.
    void Renderer::onSurfaceChanged(int width, int height)
    {
     // On-screen view dimensions (Y plane full size, chroma planes halved).
     mobile_yuv_width_[0] = width;
     mobile_yuv_width_[1] = width/2;
     mobile_yuv_width_[2] = width/2;
     mobile_yuv_height_[0] = height;
     mobile_yuv_height_[1] = height/2;
     mobile_yuv_height_[2] = height/2;

     // Allocation ceiling for the plane textures (see setup_yuv_texture).
     maximum_yuv_width_[0] = 1920;
     maximum_yuv_width_[1] = 1920/2;
     maximum_yuv_width_[2] = 1920/2;
     maximum_yuv_height_[0] = 1080;
     maximum_yuv_height_[1] = 1080/2;
     maximum_yuv_height_[2] = 1080/2;

     // If stream size not setting, default size D1
     // NOTE(review): the guard below is commented out, so 736x480 always
     // overwrites whatever setSize() previously stored — apparently a
     // debugging shortcut; confirm before shipping.
     //if (stream_yuv_width_[0] == 0) {
       stream_yuv_width_[0] = 736;
       stream_yuv_width_[1] = 736/2;
       stream_yuv_width_[2] = 736/2;
       stream_yuv_height_[0] = 480;
       stream_yuv_height_[1] = 480/2;
       stream_yuv_height_[2] = 480/2;
     //}

       setup_graphics();
       return;
    }

    Here is my Fragment, Vertex source and coordinates :

    static const char kVertexShader[] =
       "attribute vec4 vPosition;      \n"
         "attribute vec2 vTexCoord;        \n"
         "varying vec2 v_vTexCoord;        \n"
       "void main() {                        \n"
           "gl_Position = vPosition;       \n"
           "v_vTexCoord = vTexCoord;       \n"
       "}                                          \n";

    static const char kFragmentShader[] =
           "precision mediump float;               \n"
           "varying vec2 v_vTexCoord;          \n"
           "uniform sampler2D yTexture;        \n"
           "uniform sampler2D uTexture;        \n"
           "uniform sampler2D vTexture;        \n"
           "void main() {                      \n"
               "float y=texture2D(yTexture, v_vTexCoord).r;\n"
               "float u=texture2D(uTexture, v_vTexCoord).r - 0.5;\n"
               "float v=texture2D(vTexture, v_vTexCoord).r - 0.5;\n"
               "float r=y + 1.13983 * v;\n"
               "float g=y - 0.39465 * u - 0.58060 * v;\n"
               "float b=y + 2.03211 * u;\n"
               "gl_FragColor = vec4(r, g, b, 1.0);\n"
           "}\n";

    static const GLfloat kVertexInformation[] =
    {
            -1.0f, 1.0f,           // TexCoord 0 top left
            -1.0f,-1.0f,           // TexCoord 1 bottom left
             1.0f,-1.0f,           // TexCoord 2 bottom right
             1.0f, 1.0f            // TexCoord 3 top right
    };
    static const GLshort kTextureCoordinateInformation[] =
    {
             0, 0,         // TexCoord 0 top left
             0, 1,         // TexCoord 1 bottom left
             1, 1,         // TexCoord 2 bottom right
             1, 0          // TexCoord 3 top right
    };
    static const GLuint kStride = 0;//COORDS_PER_VERTEX * 4;
    static const GLshort kIndicesInformation[] =
    {
       0, 1, 2,
       0, 2, 3
    };
  • Adding picture to video using FFmpeg through android ndk

    10 octobre 2013, par Papajohn000

    I was wondering how you would go about adding a picture to the video at a certain location in the video using ffmpeg and android ndk. I want to the picture to be visable for a couple of seconds so it will need to be added in a loop.

  • Android FFmpegPlayer Streaming Service onClick notification

    8 octobre 2013, par agony

    I have a MainActivity class that displays the list of streams available for my project and the StreamingActivity class where the streaming is done.

    If the user selected an item from the list it will start the StreamingActivity and start playing the stream.
    I'm having trouble to continue streaming music when the user pressed the notification and returning it to the StreamingActivity class if the user pressed or clicked the home menu or when the app goes to onDestroy().

    I'm using FFmpegPlayer for my project 'coz it requires to play mms :// live streams for local FM station.

    Here's my code :

    // Activity hosting the live-stream player UI: a two-tab ViewPager
    // ("playing" and "DJ list") plus the play/pause/share controls. It binds
    // to FFmpegService (starting it first if needed) and drives playback
    // through the service as a IMediaPlayerServiceClient callback target.
    // NOTE(review): several references below use "StationActivity" although
    // this class is named StreamingActivity — the class was presumably
    // renamed at some point; confirm the leftover names still compile.
    public class StreamingActivity extends BaseActivity  implements ActionBar.TabListener,
    PlayerControlListener, IMediaPlayerServiceClient {


    private StatefulMediaPlayer mMediaPlayer;
    private FFmpegService mService;
    // True while this activity is bound to FFmpegService.
    private boolean mBound;

    public static final String TAG = "StationActivity";

    private static Bundle mSavedInstanceState;

    private static PlayerFragment mPlayerFragment;
    private static DJListFragment mDjListFragment;

    private SectionsPagerAdapter mSectionsPagerAdapter;
    private ViewPager mViewPager;

    // Stream URL, station frequency label, and initial tab index — the first
    // two arrive via Intent extras in onCreate.
    private String stream = "";
    private String fhz = "";
    private String page = "0";

    private Dialog shareDialog;
       private ProgressDialog dialog;

    private boolean isStreaming;


    /*************************************************************************************************************/

    @Override
    public void onCreate(Bundle savedInstanceState){
       super.onCreate(savedInstanceState);
       setContentView(R.layout.activity_station);

       // Pull the station parameters passed by the launching activity.
       Bundle bundle = getIntent().getExtras();
       if(bundle !=null){
           fhz = bundle.getString("fhz");
           stream = bundle.getString("stream");
       }

       Log.d(TAG, "page: " + page + " fhz: " + fhz + " stream: " + stream + " isStreaming: " + isStreaming);

       getSupportActionBar().setTitle("Radio \n" + fhz);
       getSupportActionBar().setDisplayHomeAsUpEnabled(true);
       getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

       mPlayerFragment = (PlayerFragment) Fragment.instantiate(this, PlayerFragment.class.getName(), null);
       mDjListFragment = (DJListFragment) Fragment.instantiate(this, DJListFragment.class.getName(), null);

       mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

       mViewPager = (ViewPager) findViewById(R.id.pager);
       mViewPager.setAdapter(mSectionsPagerAdapter);
       mViewPager.setCurrentItem(Integer.parseInt(page));

       mSavedInstanceState = savedInstanceState;

       Tab playingTab = getSupportActionBar().newTab();
       playingTab.setText(getString(R.string.playing_label));
       playingTab.setTabListener(this);

       Tab djTab = getSupportActionBar().newTab();
       djTab.setText(getString(R.string.dj_label));
       djTab.setTabListener(this);

       getSupportActionBar().addTab(playingTab);
       getSupportActionBar().addTab(djTab);

       // When swiping between different sections, select the corresponding
       // tab. We can also use ActionBar.Tab#select() to do this if we have
       // a reference to the Tab.
       mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
           @Override
           public void onPageSelected(int position) {
               // NOTE(review): refers to StationActivity, not StreamingActivity.
               StationActivity.this.getSupportActionBar().setSelectedNavigationItem(position);
           }
       });

       if (mSavedInstanceState != null) {
           getSupportActionBar().setSelectedNavigationItem(mSavedInstanceState.getInt("tab", 0));
       }

       dialog = new ProgressDialog(this);

       // Connect to (or start) the playback service; streaming begins in
       // onServiceConnected once the binding completes.
       bindToService();

       UriBean.getInstance().setStream(stream);
       Log.d(TAG ,"stream: " + UriBean.getInstance().getStream());

    }

    /********************************************************************************************************/

    // Two fixed pages: the player (position 0) and the DJ list (position 1).
    public class SectionsPagerAdapter extends FragmentPagerAdapter {
       public SectionsPagerAdapter(FragmentManager fm) {
           super(fm);
       }

       @Override
       public Fragment getItem(int position) {
           if (position == 0) {
               return mPlayerFragment;
           } else {
               return mDjListFragment;
           }
       }

       @Override
       public int getCount() {
           return 2;
       }
    }

    @Override
    public void onTabSelected(Tab tab, FragmentTransaction ft) {
       // When the given tab is selected, switch to the corresponding page in the ViewPager.
       mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(Tab tab, FragmentTransaction ft) { }

    @Override
    public void onTabReselected(Tab tab, FragmentTransaction ft) { }

    /********************************************************************************************************/

    // Modal "Buffering..." spinner shown while the player initializes.
    public void showLoadingDialog() {
       dialog.setMessage("Buffering...");
       dialog.show();
    }

    public void dismissLoadingDialog() {
       dialog.dismiss();
    }

    /********************************************************************************************************/

    /**
    * Binds to the instance of MediaPlayerService. If no instance of MediaPlayerService exists, it first starts
    * a new instance of the service.
    */
    public void bindToService() {
       Intent intent = new Intent(this, FFmpegService.class);

       if (Util.isFFmpegServiceRunning(getApplicationContext())){
           // Bind to Service
           Log.i(TAG, "bindService");
           bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
       } else {
           //start service and bind to it
           Log.i(TAG, "startService &amp; bindService");
           startService(intent);
           bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
       }
    }


    /**
    * Defines callbacks for service binding, passed to bindService()
    */
    private ServiceConnection mConnection = new ServiceConnection() {
       @Override
       public void onServiceConnected(ComponentName className, IBinder serviceBinder) {
           Log.d(TAG,"service connected");

           //bound with Service. get Service instance
           MediaPlayerBinder binder = (FFmpegService.MediaPlayerBinder) serviceBinder;
           mService = binder.getService();

           //send this instance to the service, so it can make callbacks on this instance as a client
           // NOTE(review): refers to StationActivity, not StreamingActivity.
           mService.setClient(StationActivity.this);
           mBound = true;

           Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());

           //if

           // Kick off playback as soon as the service is available.
           startStreaming();
       }

       @Override
       public void onServiceDisconnected(ComponentName arg0) {
           mBound = false;
           mService = null;
       }
    };

    /********************************************************************************************************/

    // Toggle handler for the play/pause button: pauses when unchecked,
    // (re)initializes or resumes depending on the player's current state.
    @Override
    public void onPlayerPlayStop() {
       Log.d(TAG, "onPlayerPlayStop");

       Log.v(TAG, "isStreaming: " + isStreaming);
       Log.v(TAG, "mBound:  " + mBound);

       if (mBound) {
           Log.d(TAG, "bound.............");
           mMediaPlayer = mService.getMediaPlayer();
           //pressed pause ->pause
           if (!PlayerFragment.play.isChecked()) {
               if (mMediaPlayer.isStarted()) {
                   Log.d(TAG, "pause");
                   mService.pauseMediaPlayer();
               }
           } else { //pressed play
               // STOPPED, CREATED, EMPTY, -> initialize
               if (mMediaPlayer.isStopped() || mMediaPlayer.isCreated() || mMediaPlayer.isEmpty()) {
                   startStreaming();
               } else if (mMediaPlayer.isPrepared() || mMediaPlayer.isPaused()) { //prepared, paused -> resume play
                   Log.d(TAG, "start");
                   mService.startMediaPlayer();
               }
           }

           Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
       }
    }

    /********************************************************************************************************/

    @Override
    public void onDownload() {
       Toast.makeText(this, "Not yet available...", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onComment() {
       FragmentManager fm = getSupportFragmentManager();
       DialogFragment newFragment = MyAlertDialogFragment.newInstance();
       newFragment.show(fm, "comment_dialog");
    }

    @Override
    public void onShare() {
       showShareDialog();
    }

    /********************************************************************************************************/

    // Asks the service to initialize the player for the current stream URL,
    // after a connectivity check. Requires mService to be non-null.
    private void startStreaming() {
       Log.d(TAG, "@startLoading");
       boolean isNetworkFound = Util.checkConnectivity(getApplicationContext());
       if(isNetworkFound) {
           Log.d(TAG, "network found");
           mService.initializePlayer(stream);
           isStreaming = true;
       } else {
           Toast.makeText(getApplicationContext(), "No internet connection found...", Toast.LENGTH_SHORT).show();
       }

       Log.d(TAG, "isStreaming: " + isStreaming);
       Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
    }

    @Override
    public void onInitializePlayerStart() {
       showLoadingDialog();
    }

    @Override
    public void onInitializePlayerSuccess() {
       dismissLoadingDialog();
       PlayerFragment.play.setChecked(true);


       Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
    }

    @Override
    public void onError() {
       Toast.makeText(getApplicationContext(), "Not connected to the server...", Toast.LENGTH_SHORT).show();
    }

       @Override
    public void onDestroy() {
       Log.d(TAG, "onDestroy");
       super.onDestroy();
       // NOTE(review): uiHelper is not declared in this class — presumably
       // inherited from BaseActivity; confirm.
       uiHelper.onDestroy();

       Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
       if (mBound) {
           mService.unRegister();
           unbindService(mConnection);
           mBound = false;
       }

       Log.d(TAG, "service: " + Util.isFFmpegServiceRunning(getApplicationContext()));
    }

    @Override
    public void onStop(){
       Log.d(TAG, "onStop");
       super.onStop();
    }

    /*******************************************************************************************************/

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
       int itemId = item.getItemId();
       switch (itemId){
       case android.R.id.home:
           onBackPressed();
           break;
       default:
           break;
       }    
       return true;
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
       Log.d(TAG, "@onKeyDown");
       // NOTE(review): "&amp;&amp;" below looks like an HTML-escaped "&&" from
       // the web paste this code came from — restore before compiling.
       if (keyCode == KeyEvent.KEYCODE_BACK &amp;&amp; event.getRepeatCount() == 0){
           //this.moveTaskToBack(true);
           onBackPressed();
           return true;
       }
       return super.onKeyDown(keyCode, event);
    }
    }






    // Foreground service owning the FFmpeg playback thread. Exposes
    // start/pause/stop/reset controls to a bound client, shows an ongoing
    // notification while playing, and pauses/resumes playback around phone
    // calls via a PhoneStateListener.
    public class FFmpegService  extends Service implements IMediaPlayerThreadClient {

    private FFmpegPlayerThread mMediaPlayerThread       = new FFmpegPlayerThread(this);
    private final Binder mBinder                        = new MediaPlayerBinder();
    // The currently bound activity, or null after unRegister().
    private IMediaPlayerServiceClient mClient;
    //private StreamStation mCurrentStation;

    // Tracks intended playback state; this is what isPlaying() reports.
    private boolean mIsSupposedToBePlaying = false;

    // True when playback was paused because a phone call came in.
    private boolean isPausedInCall = false;
    private PhoneStateListener phoneStateListener;
    private TelephonyManager telephonyManager;

    @Override
    public void onCreate(){
       mMediaPlayerThread.start();
    }

    /**
    * A class for clients binding to this service. The client will be passed an object of this class
    * via its onServiceConnected(ComponentName, IBinder) callback.
    */
    public class MediaPlayerBinder extends Binder {
       /**
        * Returns the instance of this service for a client to make method calls on it.
        * @return the instance of this service.
        */
       public FFmpegService getService() {
           return FFmpegService.this;
       }
    }

    /**
    * Returns the contained StatefulMediaPlayer
    * @return
    */
    public StatefulMediaPlayer getMediaPlayer() {
       return mMediaPlayerThread.getMediaPlayer();
    }

    public boolean isPlaying() {
       return mIsSupposedToBePlaying;
    }

    @Override
    public IBinder onBind(Intent arg0) {
       return mBinder;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {

       // NOTE(review): a new listener is created and registered on every
       // onStartCommand call; repeated starts may stack duplicate listeners —
       // confirm whether this service is only ever started once.
       telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
       phoneStateListener = new PhoneStateListener() {
           @Override
           public void onCallStateChanged(int state, String incomingNumber) {
               // String stateString = "N/A";
               Log.v("FFmpegService", "Starting CallStateChange");
               switch (state) {
               case TelephonyManager.CALL_STATE_OFFHOOK:
               case TelephonyManager.CALL_STATE_RINGING:
                   // Incoming or active call: pause and remember we did so.
                   if (mMediaPlayerThread != null) {
                       pauseMediaPlayer();
                       isPausedInCall = true;
                   }
                   break;
               case TelephonyManager.CALL_STATE_IDLE:
                   // Phone idle. Start playing.
                   if (mMediaPlayerThread != null) {
                       if (isPausedInCall) {
                           isPausedInCall = false;
                           startMediaPlayer();
                       }
                   }
                   break;
               }
           }
       };

       // Register the listener with the telephony manager
       telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);

       return START_STICKY;
    }

    /**
    * Sets the client using this service.
    * @param client The client of this service, which implements the IMediaPlayerServiceClient interface
    */
    public void setClient(IMediaPlayerServiceClient client) {
       this.mClient = client;
    }


    public void initializePlayer(final String station) {
       //mCurrentStation = station;
       mMediaPlayerThread.initializePlayer(station);
    }

    // Starts (or resumes) playback and promotes the service to foreground
    // with an ongoing notification that reopens StreamingActivity on tap.
    public void startMediaPlayer() {

       Intent notificationIntent = new Intent(getApplicationContext(), StreamingActivity.class);
       //notificationIntent.putExtra("page", "0");
       //notificationIntent.putExtra("isPlaying", isPlaying());
       notificationIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
       PendingIntent contentIntent = PendingIntent.getActivity(getApplicationContext(), 0 , notificationIntent , PendingIntent.FLAG_UPDATE_CURRENT);

       NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
               .setSmallIcon(R.drawable.ic_launcher)
               .setContentTitle("You are listening to Radio...")
               .setContentText("test!!!")
               .setContentIntent(contentIntent);

       startForeground(1, mBuilder.build());

       // NOTE(review): notify() with the same id right after startForeground()
       // posts the notification twice — likely redundant; confirm.
       NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
       notificationManager.notify(1, mBuilder.build());

       mIsSupposedToBePlaying = true;
       mMediaPlayerThread.startMediaPlayer();
    }

    // Cancels the playback notification. The context parameter is unused.
    public void dismissNotification(Context context) {
       String ns = Context.NOTIFICATION_SERVICE;
       NotificationManager mNotificationManager = (NotificationManager) getSystemService(ns);
       mNotificationManager.cancel(1);
    }

    /**
    * Pauses playback
    */
    public void pauseMediaPlayer() {
       Log.d("MediaPlayerService","pauseMediaPlayer() called");
       mMediaPlayerThread.pauseMediaPlayer();
       stopForeground(true);
       mIsSupposedToBePlaying = false;
       dismissNotification(this);
    }
    /**
    * Stops playback
    */
    public void stopMediaPlayer() {
       stopForeground(true);
       mMediaPlayerThread.stopMediaPlayer();

       mIsSupposedToBePlaying = false;
       dismissNotification(this);
    }

    public void resetMediaPlayer() {
       mIsSupposedToBePlaying = false;
       stopForeground(true);
       mMediaPlayerThread.resetMediaPlayer();
       dismissNotification(this);
    }

    @Override
    public void onError() {
       mIsSupposedToBePlaying = false;
       // NOTE(review): mClient is null after unRegister(); an error arriving
       // then would throw a NullPointerException — confirm lifecycle ordering.
       mClient.onError();
       dismissNotification(this);
    }

    @Override
    public void onInitializePlayerStart() {
       mClient.onInitializePlayerStart();
    }

    @Override
    public void onInitializePlayerSuccess() {
       startMediaPlayer();
       mClient.onInitializePlayerSuccess();
       mIsSupposedToBePlaying = true;
    }

    // Detaches the client and clears playback state (called by the activity
    // in onDestroy before unbinding).
    public void unRegister() {
       this.mClient = null;
       mIsSupposedToBePlaying = false;
       dismissNotification(this);
    }

    }

    Hoping someone can help me here...