
Other articles (55)
- Websites made with MediaSPIP
2 May 2011. This page lists some websites based on MediaSPIP.
- MediaSPIP Core : La Configuration
9 November 2010. By default, MediaSPIP Core provides three different configuration pages (these pages rely on the CFG configuration plugin to work): a page for the general configuration of the skeleton; a page for the configuration of the site's home page; a page for the configuration of sections.
It also provides an additional page that only appears when certain plugins are enabled, allowing their display and specific features to be controlled (...)
- Creating farms of unique websites
13 April 2011. MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
This allows (among other things): implementation costs to be shared between several different projects / individuals; rapid deployment of multiple unique sites; creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)
On other sites (9084)
- Trouble syncing libavformat/ffmpeg with x264 and RTP
26 December 2012, by Jacob Peddicord

I've been working on some streaming software that takes live feeds
from various kinds of cameras and streams over the network using
H.264. To accomplish this, I'm using the x264 encoder directly (with
the "zerolatency" preset) and feeding NALs as they are available to
libavformat to pack into RTP (ultimately RTSP). Ideally, this
application should be as real-time as possible. For the most part,
this has been working well.

Unfortunately, however, there is some sort of synchronization issue:
any video playback on clients seems to show a few smooth frames,
followed by a short pause, then more frames ; repeat. Additionally,
there appears to be approximately a 4-second delay. This happens with
every video player I've tried: Totem, VLC, and basic gstreamer pipes.

I've boiled it all down to a somewhat small test case:
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <x264.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#define WIDTH 640
#define HEIGHT 480
#define FPS 30
#define BITRATE 400000
#define RTP_ADDRESS "127.0.0.1"
#define RTP_PORT 49990
struct AVFormatContext* avctx;
struct x264_t* encoder;
struct SwsContext* imgctx;
uint8_t test = 0x80;
void create_sample_picture(x264_picture_t* picture)
{
// create a frame to store in
x264_picture_alloc(picture, X264_CSP_I420, WIDTH, HEIGHT);
// fake image generation
// disregard how wrong this is; just writing a quick test
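// (PIX_FMT_MONOWHITE is 1 bit per pixel, so the source stride is WIDTH / 8 bytes per row)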
int strides = WIDTH / 8;
uint8_t* data = malloc(WIDTH * HEIGHT * 3);
memset(data, test, WIDTH * HEIGHT * 3);
test = (test << 1) | (test >> (8 - 1));
// scale the image
sws_scale(imgctx, (const uint8_t* const*) &data, &strides, 0, HEIGHT,
picture->img.plane, picture->img.i_stride);
}
int encode_frame(x264_picture_t* picture, x264_nal_t** nals)
{
// encode a frame
x264_picture_t pic_out;
int num_nals;
int frame_size = x264_encoder_encode(encoder, nals, &num_nals, picture, &pic_out);
// ignore bad frames
if (frame_size < 0)
{
return frame_size;
}
return num_nals;
}
void stream_frame(uint8_t* payload, int size)
{
// initialize a packet
AVPacket p;
av_init_packet(&p);
p.data = payload;
p.size = size;
p.stream_index = 0;
p.flags = AV_PKT_FLAG_KEY;
p.pts = AV_NOPTS_VALUE;
p.dts = AV_NOPTS_VALUE;
// send it out
av_interleaved_write_frame(avctx, &p);
}
int main(int argc, char* argv[])
{
// initialize ffmpeg
av_register_all();
// set up image scaler
// (in-width, in-height, in-format, out-width, out-height, out-format, scaling-method, 0, 0, 0)
imgctx = sws_getContext(WIDTH, HEIGHT, PIX_FMT_MONOWHITE,
WIDTH, HEIGHT, PIX_FMT_YUV420P,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
// set up encoder presets
x264_param_t param;
x264_param_default_preset(&param, "ultrafast", "zerolatency");
param.i_threads = 3;
param.i_width = WIDTH;
param.i_height = HEIGHT;
param.i_fps_num = FPS;
param.i_fps_den = 1;
param.i_keyint_max = FPS;
param.b_intra_refresh = 0;
param.rc.i_bitrate = BITRATE;
param.b_repeat_headers = 1; // whether to repeat headers or write just once
param.b_annexb = 1; // place start codes (1) or sizes (0)
// initialize
x264_param_apply_profile(&param, "high");
encoder = x264_encoder_open(&param);
// at this point, x264_encoder_headers can be used, but it has had no effect
// set up streaming context. a lot of error handling has been omitted
// for brevity, but this should be pretty standard.
avctx = avformat_alloc_context();
struct AVOutputFormat* fmt = av_guess_format("rtp", NULL, NULL);
avctx->oformat = fmt;
snprintf(avctx->filename, sizeof(avctx->filename), "rtp://%s:%d", RTP_ADDRESS, RTP_PORT);
if (url_fopen(&avctx->pb, avctx->filename, URL_WRONLY) < 0)
{
perror("url_fopen failed");
return 1;
}
struct AVStream* stream = av_new_stream(avctx, 1);
// initialize codec
AVCodecContext* c = stream->codec;
c->codec_id = CODEC_ID_H264;
c->codec_type = AVMEDIA_TYPE_VIDEO;
c->flags = CODEC_FLAG_GLOBAL_HEADER;
c->width = WIDTH;
c->height = HEIGHT;
c->time_base.den = FPS;
c->time_base.num = 1;
c->gop_size = FPS;
c->bit_rate = BITRATE;
avctx->flags = AVFMT_FLAG_RTP_HINT;
// write the header
av_write_header(avctx);
// make some frames
for (int frame = 0; frame < 10000; frame++)
{
// create a sample moving frame
x264_picture_t* pic = (x264_picture_t*) malloc(sizeof(x264_picture_t));
create_sample_picture(pic);
// encode the frame
x264_nal_t* nals;
int num_nals = encode_frame(pic, &nals);
if (num_nals < 0)
printf("invalid frame size: %d\n", num_nals);
// send out NALs
for (int i = 0; i < num_nals; i++)
{
stream_frame(nals[i].p_payload, nals[i].i_payload);
}
// free up resources
x264_picture_clean(pic);
free(pic);
// stream at approx 30 fps
printf("frame %d\n", frame);
usleep(33333);
}
return 0;
}

This test shows black lines on a white background that should move smoothly to the left. It was written for ffmpeg 0.6.5, but the problem can be reproduced on 0.8 and 0.10 (from what I've tested so far). I've taken some shortcuts in error handling to make this example as short as possible while still showing the problem, so please excuse some of the nasty code. I should also note that while an SDP is not used here, I have tried using that already with similar results. The test can be compiled with:

gcc -g -std=gnu99 streamtest.c -lswscale -lavformat -lx264 -lm -lpthread -o streamtest
It can be played with gstreamer directly:
gst-launch udpsrc port=49990 ! application/x-rtp,payload=96,clock-rate=90000 ! rtph264depay ! decodebin ! xvimagesink
You should immediately notice the stuttering. One common "fix" I've seen all over the Internet is to add sync=false to the pipeline:

gst-launch udpsrc port=49990 ! application/x-rtp,payload=96,clock-rate=90000 ! rtph264depay ! decodebin ! xvimagesink sync=false
This causes playback to be smooth (and near-realtime), but is a non-solution and only works with gstreamer. I'd like to fix the problem at the source. I've been able to stream with near-identical parameters using raw ffmpeg and haven't had any issues:

ffmpeg -re -i sample.mp4 -vcodec libx264 -vpre ultrafast -vpre baseline -b 400000 -an -f rtp rtp://127.0.0.1:49990 -an

So clearly I'm doing something wrong. But what is it?
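One detail that stands out on re-reading the test case, purely as a guess: stream_frame() sends every packet with pts and dts set to AV_NOPTS_VALUE, so the RTP muxer never receives any timing information. A timestamped variant could look like the sketch below (assuming the AVStream* created in main() and the loop's frame counter are passed in; this is untested against the versions above):

void stream_frame_timed(AVStream* stream, uint8_t* payload, int size, int64_t frame_index)
{
    AVPacket p;
    av_init_packet(&p);
    p.data = payload;
    p.size = size;
    p.stream_index = 0;
    p.flags = AV_PKT_FLAG_KEY;
    // express the timestamp in 1/FPS units, then rescale to the muxer's time base
    AVRational input_tb = { 1, FPS };
    p.pts = av_rescale_q(frame_index, input_tb, stream->time_base);
    p.dts = p.pts;
    av_interleaved_write_frame(avctx, &p);
}

The call inside the frame loop would then become stream_frame_timed(stream, nals[i].p_payload, nals[i].i_payload, frame).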
- Converting DVD image with subtitles to MKV using avconv
16 January 2016, by Carlos Eugenio Thompson Pinzón

This is the procedure I know to convert a DVD image to another video format (e.g. MP4):
- concatenate the VTS_01_n.VOB files inside the VIDEO_TS folder (for n >= 0) into a single VOB file.
- use avconv or ffmpeg in order to convert that VOB into another format.
So far so good; however, now I want to convert the DVD image with the subtitles. As far as I know the MKV format supports subtitles, so it seems an obvious choice. Alternatively I might use any other format with hard subtitles (subtitles as part of the video image).

However, the subtitle encoding in the DVD image is dvdsub, and I get the following error:

Only SUBTITLE_ASS type supported.
Subtitle encoding failed

However, when running avconv -codecs I get:

...
 DES dvdsub DVD subtitles
...

And the -c:s copy switch, while it prevents the command from failing, does not seem to produce a subtitle that the player can understand.

So, how can I create ass subtitles from dvdsub using avconv?

My VOB file has eight subtitle channels and two audio channels. The Ubuntu video app does not show any subtitles, and only the first audio channel seems to be working, so the DVD image might be broken.

Another file, for a double-layer DVD, displays the subtitle encoding error; however, when using the -c:s copy switch it further displays:

Application provided invalid, non monotonically increasing dts to muxer in stream 2: 49 >= 49
av_interleaved_write_frame(): Invalid argument

Thank you in advance for any ideas on how to solve these problems.

(I am using Ubuntu where ffmpeg is an alias for avconv. I know it is possible to install the real ffmpeg but so far I have not done so.)
Update: commands and console outputs.

commands

cat VTS_01_1.VOB VTS_01_2.VOB VTS_01_3.VOB VTS_01_4.VOB VTS_01_5.VOB > ~/temp/mymovie.VOB
cd ~/temp
avconv -i mymovie.VOB

output
avconv version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2000-2013 the Libav developers
built on Mar 30 2013 22:20:06 with gcc 4.7.2
[mpeg @ 0x1a64d40] max_analyze_duration reached
Input #0, mpeg, from 'mymovie.VOB':
Duration: 00:00:01.95, start: 0.280633, bitrate: -2147483 kb/s
Stream #0.0[0x1e0]: Video: mpeg2video (Main), yuv420p, 720x480 [PAR 8:9 DAR 4:3], 7500 kb/s, 29.97 fps, 29.97 tbr, 90k tbn, 59.94 tbc
Stream #0.1[0x20]: Subtitle: dvdsub
Stream #0.2[0x21]: Subtitle: dvdsub
Stream #0.3[0x22]: Subtitle: dvdsub
Stream #0.4[0x23]: Subtitle: dvdsub
Stream #0.5[0x24]: Subtitle: dvdsub
Stream #0.6[0x25]: Subtitle: dvdsub
Stream #0.7[0x26]: Subtitle: dvdsub
Stream #0.8[0x27]: Subtitle: dvdsub
Stream #0.9[0x80]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
Stream #0.10[0x81]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
At least one output file must be specified

command
avconv -i mymovie.VOB mymovie.mkv
output
avconv version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2000-2013 the Libav developers
built on Mar 30 2013 22:20:06 with gcc 4.7.2
[mpeg @ 0x1cdad40] max_analyze_duration reached
Input #0, mpeg, from 'mymovie.VOB':
Duration: 00:00:01.95, start: 0.280633, bitrate: -2147483 kb/s
Stream #0.0[0x1e0]: Video: mpeg2video (Main), yuv420p, 720x480 [PAR 8:9 DAR 4:3], 7500 kb/s, 29.97 fps, 29.97 tbr, 90k tbn, 59.94 tbc
Stream #0.1[0x20]: Subtitle: dvdsub
Stream #0.2[0x21]: Subtitle: dvdsub
Stream #0.3[0x22]: Subtitle: dvdsub
Stream #0.4[0x23]: Subtitle: dvdsub
Stream #0.5[0x24]: Subtitle: dvdsub
Stream #0.6[0x25]: Subtitle: dvdsub
Stream #0.7[0x26]: Subtitle: dvdsub
Stream #0.8[0x27]: Subtitle: dvdsub
Stream #0.9[0x80]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
Stream #0.10[0x81]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
File 'mymovie.mkv' already exists. Overwrite ? [y/N] y
[buffer @ 0x1ce23c0] w:720 h:480 pixfmt:yuv420p
Output #0, matroska, to 'mymovie.mkv':
Metadata:
encoder : Lavf53.21.1
Stream #0.0: Video: mpeg4, yuv420p, 720x480 [PAR 8:9 DAR 4:3], q=2-31, 200 kb/s, 1k tbn, 29.97 tbc
Stream #0.1: Audio: libvorbis, 48000 Hz, 5.1, s16
Stream #0.2: Subtitle: ass, 200 kb/s
Stream mapping:
Stream #0:0 -> #0:0 (mpeg2video -> mpeg4)
Stream #0:9 -> #0:1 (ac3 -> libvorbis)
Stream #0:1 -> #0:2 (dvdsub -> ass)
Press ctrl-c to stop encoding
[ass @ 0x1ce0140] Only SUBTITLE_ASS type supported.
Subtitle encoding failed

command
avconv -i mymovie.VOB -c:s copy mymovie.mkv
output omitted.
command
avconv -i mymovie.mkv
output
avconv version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2000-2013 the Libav developers
built on Mar 30 2013 22:20:06 with gcc 4.7.2
[matroska,webm @ 0xbc1d40] Estimating duration from bitrate, this may be inaccurate
Input #0, matroska,webm, from 'mymovie.mkv':
Metadata:
ENCODER : Lavf53.21.1
Duration: 01:05:09.47, start: 0.000000, bitrate: N/A
Stream #0.0: Video: mpeg4 (Simple Profile), yuv420p, 720x480 [PAR 8:9 DAR 4:3], 29.97 fps, 29.97 tbr, 1k tbn, 30k tbc (default)
Stream #0.1: Audio: vorbis, 48000 Hz, 5.1, s16 (default)
Stream #0.2: Subtitle: dvdsub (default)
At least one output file must be specified

Now, for the double-layer:
commands

cat VTS_01_1.VOB VTS_01_2.VOB VTS_01_3.VOB VTS_01_4.VOB VTS_01_5.VOB VTS_01_6.VOB VTS_01_7.VOB VTS_01_8.VOB > ~/temp/mylongmovie.VOB
cd ~/temp
avconv -i mylongmovie.VOB mylongmovie.mkv

output
avconv version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2000-2013 the Libav developers
built on Mar 30 2013 22:20:06 with gcc 4.7.2
[mpeg @ 0x13c2d40] max_analyze_duration reached
Input #0, mpeg, from 'Cosmos-0203.VOB':
Duration: 00:00:30.24, start: 0.280633, bitrate: 2103365 kb/s
Stream #0.0[0x1e0]: Video: mpeg2video (Main), yuv420p, 720x480 [PAR 8:9 DAR 4:3], 8000 kb/s, 27.46 fps, 59.94 tbr, 90k tbn, 59.94 tbc
Stream #0.1[0x20]: Subtitle: dvdsub
Stream #0.2[0x21]: Subtitle: dvdsub
Stream #0.3[0x22]: Subtitle: dvdsub
Stream #0.4[0x23]: Subtitle: dvdsub
Stream #0.5[0x24]: Subtitle: dvdsub
Stream #0.6[0x25]: Subtitle: dvdsub
Stream #0.7[0x26]: Subtitle: dvdsub
Stream #0.8[0x27]: Subtitle: dvdsub
Stream #0.9[0x81]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
Stream #0.10[0x80]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
File 'mylongmovie.mkv' already exists. Overwrite ? [y/N] y
[buffer @ 0x13ca3c0] w:720 h:480 pixfmt:yuv420p
Output #0, matroska, to 'mylongmovie.mkv':
Metadata:
encoder : Lavf53.21.1
Stream #0.0: Video: mpeg4, yuv420p, 720x480 [PAR 8:9 DAR 4:3], q=2-31, 200 kb/s, 1k tbn, 59.94 tbc
Stream #0.1: Audio: libvorbis, 48000 Hz, 5.1, s16
Stream #0.2: Subtitle: ass, 200 kb/s
Stream mapping:
Stream #0:0 -> #0:0 (mpeg2video -> mpeg4)
Stream #0:9 -> #0:1 (ac3 -> libvorbis)
Stream #0:1 -> #0:2 (dvdsub -> ass)
Press ctrl-c to stop encoding
[ass @ 0x13d19c0] Only SUBTITLE_ASS type supported.
Subtitle encoding failed

command
avconv -i mylongmovie.VOB -c:s copy mylongmovie.mkv
output
avconv version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2000-2013 the Libav developers
built on Mar 30 2013 22:20:06 with gcc 4.7.2
[mpeg @ 0xce1d40] max_analyze_duration reached
Input #0, mpeg, from 'mylongmovie.VOB':
Duration: 00:00:30.24, start: 0.280633, bitrate: 2103365 kb/s
Stream #0.0[0x1e0]: Video: mpeg2video (Main), yuv420p, 720x480 [PAR 8:9 DAR 4:3], 8000 kb/s, 27.46 fps, 59.94 tbr, 90k tbn, 59.94 tbc
Stream #0.1[0x20]: Subtitle: dvdsub
Stream #0.2[0x21]: Subtitle: dvdsub
Stream #0.3[0x22]: Subtitle: dvdsub
Stream #0.4[0x23]: Subtitle: dvdsub
Stream #0.5[0x24]: Subtitle: dvdsub
Stream #0.6[0x25]: Subtitle: dvdsub
Stream #0.7[0x26]: Subtitle: dvdsub
Stream #0.8[0x27]: Subtitle: dvdsub
Stream #0.9[0x81]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
Stream #0.10[0x80]: Audio: ac3, 48000 Hz, 5.1, s16, 448 kb/s
File 'mylongmovie.mkv' already exists. Overwrite ? [y/N] y
[buffer @ 0xce93c0] w:720 h:480 pixfmt:yuv420p
Output #0, matroska, to 'mylongmovie.mkv':
Metadata:
encoder : Lavf53.21.1
Stream #0.0: Video: mpeg4, yuv420p, 720x480 [PAR 8:9 DAR 4:3], q=2-31, 200 kb/s, 1k tbn, 59.94 tbc
Stream #0.1: Audio: libvorbis, 48000 Hz, 5.1, s16
Stream #0.2: Subtitle: dvdsub
Stream mapping:
Stream #0:0 -> #0:0 (mpeg2video -> mpeg4)
Stream #0:9 -> #0:1 (ac3 -> libvorbis)
Stream #0:1 -> #0:2 (copy)
Press ctrl-c to stop encoding
[matroska @ 0xce4b40] Application provided invalid, non monotonically increasing dts to muxer in stream 2: 49 >= 49
av_interleaved_write_frame(): Invalid argument
- Android FFmpegPlayer Streaming Service onClick notification
8 October 2013, by agony

I have a MainActivity class that displays the list of streams available for my project, and a StreamingActivity class where the streaming is done.
If the user selects an item from the list, the app starts the StreamingActivity and begins playing the stream.

I'm having trouble keeping the music streaming, and returning the user to the StreamingActivity, when the notification is tapped after the user has pressed the home button or after the app has gone through onDestroy(). I'm using FFmpegPlayer for my project because it needs to play mms:// live streams from a local FM station.

Here's my code:
public class StreamingActivity extends BaseActivity implements ActionBar.TabListener,
PlayerControlListener, IMediaPlayerServiceClient {
private StatefulMediaPlayer mMediaPlayer;
private FFmpegService mService;
private boolean mBound;
public static final String TAG = "StationActivity";
private static Bundle mSavedInstanceState;
private static PlayerFragment mPlayerFragment;
private static DJListFragment mDjListFragment;
private SectionsPagerAdapter mSectionsPagerAdapter;
private ViewPager mViewPager;
private String stream = "";
private String fhz = "";
private String page = "0";
private Dialog shareDialog;
private ProgressDialog dialog;
private boolean isStreaming;
/*************************************************************************************************************/
@Override
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_station);
Bundle bundle = getIntent().getExtras();
if(bundle !=null){
fhz = bundle.getString("fhz");
stream = bundle.getString("stream");
}
Log.d(TAG, "page: " + page + " fhz: " + fhz + " stream: " + stream + " isStreaming: " + isStreaming);
getSupportActionBar().setTitle("Radio \n" + fhz);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
mPlayerFragment = (PlayerFragment) Fragment.instantiate(this, PlayerFragment.class.getName(), null);
mDjListFragment = (DJListFragment) Fragment.instantiate(this, DJListFragment.class.getName(), null);
mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());
mViewPager = (ViewPager) findViewById(R.id.pager);
mViewPager.setAdapter(mSectionsPagerAdapter);
mViewPager.setCurrentItem(Integer.parseInt(page));
mSavedInstanceState = savedInstanceState;
Tab playingTab = getSupportActionBar().newTab();
playingTab.setText(getString(R.string.playing_label));
playingTab.setTabListener(this);
Tab djTab = getSupportActionBar().newTab();
djTab.setText(getString(R.string.dj_label));
djTab.setTabListener(this);
getSupportActionBar().addTab(playingTab);
getSupportActionBar().addTab(djTab);
// When swiping between different sections, select the corresponding
// tab. We can also use ActionBar.Tab#select() to do this if we have
// a reference to the Tab.
mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
@Override
public void onPageSelected(int position) {
StationActivity.this.getSupportActionBar().setSelectedNavigationItem(position);
}
});
if (mSavedInstanceState != null) {
getSupportActionBar().setSelectedNavigationItem(mSavedInstanceState.getInt("tab", 0));
}
dialog = new ProgressDialog(this);
bindToService();
UriBean.getInstance().setStream(stream);
Log.d(TAG ,"stream: " + UriBean.getInstance().getStream());
}
/********************************************************************************************************/
public class SectionsPagerAdapter extends FragmentPagerAdapter {
public SectionsPagerAdapter(FragmentManager fm) {
super(fm);
}
@Override
public Fragment getItem(int position) {
if (position == 0) {
return mPlayerFragment;
} else {
return mDjListFragment;
}
}
@Override
public int getCount() {
return 2;
}
}
@Override
public void onTabSelected(Tab tab, FragmentTransaction ft) {
// When the given tab is selected, switch to the corresponding page in the ViewPager.
mViewPager.setCurrentItem(tab.getPosition());
}
@Override
public void onTabUnselected(Tab tab, FragmentTransaction ft) { }
@Override
public void onTabReselected(Tab tab, FragmentTransaction ft) { }
/********************************************************************************************************/
public void showLoadingDialog() {
dialog.setMessage("Buffering...");
dialog.show();
}
public void dismissLoadingDialog() {
dialog.dismiss();
}
/********************************************************************************************************/
/**
* Binds to the instance of MediaPlayerService. If no instance of MediaPlayerService exists, it first starts
* a new instance of the service.
*/
public void bindToService() {
Intent intent = new Intent(this, FFmpegService.class);
if (Util.isFFmpegServiceRunning(getApplicationContext())){
// Bind to Service
Log.i(TAG, "bindService");
bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
} else {
//start service and bind to it
Log.i(TAG, "startService & bindService");
startService(intent);
bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
}
}
/**
* Defines callbacks for service binding, passed to bindService()
*/
private ServiceConnection mConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName className, IBinder serviceBinder) {
Log.d(TAG,"service connected");
//bound with Service. get Service instance
MediaPlayerBinder binder = (FFmpegService.MediaPlayerBinder) serviceBinder;
mService = binder.getService();
//send this instance to the service, so it can make callbacks on this instance as a client
mService.setClient(StationActivity.this);
mBound = true;
Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
//if
startStreaming();
}
@Override
public void onServiceDisconnected(ComponentName arg0) {
mBound = false;
mService = null;
}
};
/********************************************************************************************************/
@Override
public void onPlayerPlayStop() {
Log.d(TAG, "onPlayerPlayStop");
Log.v(TAG, "isStreaming: " + isStreaming);
Log.v(TAG, "mBound: " + mBound);
if (mBound) {
Log.d(TAG, "bound.............");
mMediaPlayer = mService.getMediaPlayer();
//pressed pause ->pause
if (!PlayerFragment.play.isChecked()) {
if (mMediaPlayer.isStarted()) {
Log.d(TAG, "pause");
mService.pauseMediaPlayer();
}
} else { //pressed play
// STOPPED, CREATED, EMPTY, -> initialize
if (mMediaPlayer.isStopped() || mMediaPlayer.isCreated() || mMediaPlayer.isEmpty()) {
startStreaming();
} else if (mMediaPlayer.isPrepared() || mMediaPlayer.isPaused()) { //prepared, paused -> resume play
Log.d(TAG, "start");
mService.startMediaPlayer();
}
}
Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
}
}
/********************************************************************************************************/
@Override
public void onDownload() {
Toast.makeText(this, "Not yet available...", Toast.LENGTH_SHORT).show();
}
@Override
public void onComment() {
FragmentManager fm = getSupportFragmentManager();
DialogFragment newFragment = MyAlertDialogFragment.newInstance();
newFragment.show(fm, "comment_dialog");
}
@Override
public void onShare() {
showShareDialog();
}
/********************************************************************************************************/
private void startStreaming() {
Log.d(TAG, "@startLoading");
boolean isNetworkFound = Util.checkConnectivity(getApplicationContext());
if(isNetworkFound) {
Log.d(TAG, "network found");
mService.initializePlayer(stream);
isStreaming = true;
} else {
Toast.makeText(getApplicationContext(), "No internet connection found...", Toast.LENGTH_SHORT).show();
}
Log.d(TAG, "isStreaming: " + isStreaming);
Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
}
@Override
public void onInitializePlayerStart() {
showLoadingDialog();
}
@Override
public void onInitializePlayerSuccess() {
dismissLoadingDialog();
PlayerFragment.play.setChecked(true);
Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
}
@Override
public void onError() {
Toast.makeText(getApplicationContext(), "Not connected to the server...", Toast.LENGTH_SHORT).show();
}
@Override
public void onDestroy() {
Log.d(TAG, "onDestroy");
super.onDestroy();
uiHelper.onDestroy();
Log.d(TAG, "isPlaying === SERVICE: " + mService.isPlaying());
if (mBound) {
mService.unRegister();
unbindService(mConnection);
mBound = false;
}
Log.d(TAG, "service: " + Util.isFFmpegServiceRunning(getApplicationContext()));
}
@Override
public void onStop(){
Log.d(TAG, "onStop");
super.onStop();
}
/*******************************************************************************************************/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int itemId = item.getItemId();
switch (itemId){
case android.R.id.home:
onBackPressed();
break;
default:
break;
}
return true;
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
Log.d(TAG, "@onKeyDown");
if (keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0){
//this.moveTaskToBack(true);
onBackPressed();
return true;
}
return super.onKeyDown(keyCode, event);
}
}
public class FFmpegService extends Service implements IMediaPlayerThreadClient {
private FFmpegPlayerThread mMediaPlayerThread = new FFmpegPlayerThread(this);
private final Binder mBinder = new MediaPlayerBinder();
private IMediaPlayerServiceClient mClient;
//private StreamStation mCurrentStation;
private boolean mIsSupposedToBePlaying = false;
private boolean isPausedInCall = false;
private PhoneStateListener phoneStateListener;
private TelephonyManager telephonyManager;
@Override
public void onCreate(){
mMediaPlayerThread.start();
}
/**
* A class for clients binding to this service. The client will be passed an object of this class
* via its onServiceConnected(ComponentName, IBinder) callback.
*/
public class MediaPlayerBinder extends Binder {
/**
* Returns the instance of this service for a client to make method calls on it.
* @return the instance of this service.
*/
public FFmpegService getService() {
return FFmpegService.this;
}
}
/**
* Returns the contained StatefulMediaPlayer
* @return
*/
public StatefulMediaPlayer getMediaPlayer() {
return mMediaPlayerThread.getMediaPlayer();
}
public boolean isPlaying() {
return mIsSupposedToBePlaying;
}
@Override
public IBinder onBind(Intent arg0) {
return mBinder;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
phoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
// String stateString = "N/A";
Log.v("FFmpegService", "Starting CallStateChange");
switch (state) {
case TelephonyManager.CALL_STATE_OFFHOOK:
case TelephonyManager.CALL_STATE_RINGING:
if (mMediaPlayerThread != null) {
pauseMediaPlayer();
isPausedInCall = true;
}
break;
case TelephonyManager.CALL_STATE_IDLE:
// Phone idle. Start playing.
if (mMediaPlayerThread != null) {
if (isPausedInCall) {
isPausedInCall = false;
startMediaPlayer();
}
}
break;
}
}
};
// Register the listener with the telephony manager
telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
return START_STICKY;
}
/**
* Sets the client using this service.
* @param client The client of this service, which implements the IMediaPlayerServiceClient interface
*/
public void setClient(IMediaPlayerServiceClient client) {
this.mClient = client;
}
public void initializePlayer(final String station) {
//mCurrentStation = station;
mMediaPlayerThread.initializePlayer(station);
}
public void startMediaPlayer() {
Intent notificationIntent = new Intent(getApplicationContext(), StreamingActivity.class);
//notificationIntent.putExtra("page", "0");
//notificationIntent.putExtra("isPlaying", isPlaying());
notificationIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
PendingIntent contentIntent = PendingIntent.getActivity(getApplicationContext(), 0 , notificationIntent , PendingIntent.FLAG_UPDATE_CURRENT);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
.setSmallIcon(R.drawable.ic_launcher)
.setContentTitle("You are listening to Radio...")
.setContentText("test!!!")
.setContentIntent(contentIntent);
startForeground(1, mBuilder.build());
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(1, mBuilder.build());
mIsSupposedToBePlaying = true;
mMediaPlayerThread.startMediaPlayer();
}
public void dismissNotification(Context context) {
String ns = Context.NOTIFICATION_SERVICE;
NotificationManager mNotificationManager = (NotificationManager) getSystemService(ns);
mNotificationManager.cancel(1);
}
/**
* Pauses playback
*/
public void pauseMediaPlayer() {
Log.d("MediaPlayerService","pauseMediaPlayer() called");
mMediaPlayerThread.pauseMediaPlayer();
stopForeground(true);
mIsSupposedToBePlaying = false;
dismissNotification(this);
}
/**
* Stops playback
*/
public void stopMediaPlayer() {
stopForeground(true);
mMediaPlayerThread.stopMediaPlayer();
mIsSupposedToBePlaying = false;
dismissNotification(this);
}
public void resetMediaPlayer() {
mIsSupposedToBePlaying = false;
stopForeground(true);
mMediaPlayerThread.resetMediaPlayer();
dismissNotification(this);
}
@Override
public void onError() {
mIsSupposedToBePlaying = false;
mClient.onError();
dismissNotification(this);
}
@Override
public void onInitializePlayerStart() {
mClient.onInitializePlayerStart();
}
@Override
public void onInitializePlayerSuccess() {
startMediaPlayer();
mClient.onInitializePlayerSuccess();
mIsSupposedToBePlaying = true;
}
public void unRegister() {
this.mClient = null;
mIsSupposedToBePlaying = false;
dismissNotification(this);
}
}

Hoping someone can help me here...