
Recherche avancée
Médias (2)
-
Core Media Video
4 avril 2013, par
Mis à jour : Juin 2013
Langue : français
Type : Video
-
Video d’abeille en portrait
14 mai 2011, par
Mis à jour : Février 2012
Langue : français
Type : Video
Autres articles (84)
-
Pas question de marché, de cloud etc...
10 avril 2011 — Le vocabulaire utilisé sur ce site essaie d’éviter toute référence à la mode qui fleurit allègrement
sur le web 2.0 et dans les entreprises qui en vivent.
Vous êtes donc invité à bannir l’utilisation des termes "Brand", "Cloud", "Marché" etc...
Notre motivation est avant tout de créer un outil simple, accessible à tout le monde, favorisant
le partage de créations sur Internet et permettant aux auteurs de garder une autonomie optimale.
Aucun "contrat Gold ou Premium" n’est donc prévu, aucun (...) -
Activation de l’inscription des visiteurs
12 avril 2011, par — Il est également possible d’activer l’inscription des visiteurs ce qui permettra à tout un chacun d’ouvrir soi-même un compte sur le canal en question dans le cadre de projets ouverts par exemple.
Pour ce faire, il suffit d’aller dans l’espace de configuration du site en choisissant le sous menus "Gestion des utilisateurs". Le premier formulaire visible correspond à cette fonctionnalité.
Par défaut, MediaSPIP a créé lors de son initialisation un élément de menu dans le menu du haut de la page menant (...) -
Ecrire une actualité
21 juin 2013, par — Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
Vous pouvez personnaliser le formulaire de création d’une actualité.
Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...)
Sur d’autres sites (9772)
-
FFMPEG AAC encoding causes audio to be lower in pitch
14 février 2017, par Paul Knopf — I built a sample application that encodes AAC (from PortAudio) into a MP4 container (no video stream).
The resulting audio is lower in pitch.
#include "stdafx.h"
#include "TestRecording.h"
#include "libffmpeg.h"
// Default constructor — nothing to initialize here; all recording state lives
// in the RecordingContext created locally inside Test().
TestRecording::TestRecording()
{
}
// Nothing to release — the FFmpeg/PortAudio resources are torn down by
// FinalizeRecordingContext() / Pa_CloseStream() before Test() returns.
TestRecording::~TestRecording()
{
}
// Bundles the muxing state shared between the PortAudio capture callback and
// the init/finalize helpers: the output container, its single AAC audio
// stream, and the reusable frame used to hand captured samples to the encoder.
// Uses C++11 in-class initializers (the file already uses C++11 `auto`)
// instead of a hand-written zeroing constructor.
struct RecordingContext
{
	libffmpeg::AVFormatContext* formatContext = NULL; // owned; freed in FinalizeRecordingContext()
	libffmpeg::AVStream* audioStream = NULL;          // owned by formatContext, never freed directly
	libffmpeg::AVFrame* audioFrame = NULL;            // reusable sample frame; freed in FinalizeRecordingContext()
	int audioFrameframeNumber = 0;                    // count of packets produced so far
};
static int AudioRecordCallback(const void *inputBuffer, void *outputBuffer,
unsigned long framesPerBuffer,
const PaStreamCallbackTimeInfo* timeInfo,
PaStreamCallbackFlags statusFlags,
void *userData)
{
RecordingContext* recordingContext = (RecordingContext*)userData;
libffmpeg::avcodec_fill_audio_frame(recordingContext->audioFrame,
recordingContext->audioFrame->channels,
recordingContext->audioStream->codec->sample_fmt,
static_cast<const unsigned="unsigned">(inputBuffer),
(framesPerBuffer * sizeof(float) * recordingContext->audioFrame->channels),
0);
libffmpeg::AVPacket pkt;
libffmpeg::av_init_packet(&pkt);
pkt.data = NULL;
pkt.size = 0;
int gotpacket;
int result = avcodec_encode_audio2(recordingContext->audioStream->codec, &pkt, recordingContext->audioFrame, &gotpacket);
if (result < 0)
{
LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't encode the audio frame to acc");
return paContinue;
}
if (gotpacket)
{
pkt.stream_index = recordingContext->audioStream->index;
recordingContext->audioFrameframeNumber++;
// this codec requires no bitstream filter, just send it to the muxer!
result = libffmpeg::av_write_frame(recordingContext->formatContext, &pkt);
if (result < 0)
{
LOG(ERROR) << "Couldn't write the encoded audio frame";
libffmpeg::av_free_packet(&pkt);
return paContinue;
}
libffmpeg::av_free_packet(&pkt);
}
return paContinue;
}
// Creates the MP4 output context and its single AAC audio stream (48 kHz
// stereo, planar float samples, 64 kb/s), allocates the reusable frame the
// capture callback fills, opens the output file and writes the container
// headers. Returns false (after logging) on any failure; the caller is
// expected to abort the recording.
static bool InitializeRecordingContext(RecordingContext* recordingContext)
{
	int result = libffmpeg::avformat_alloc_output_context2(&recordingContext->formatContext, NULL, NULL, "C:\\Users\\Paul\\Desktop\\test.mp4");
	if (result < 0)
	{
		LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't create output format context");
		return false;
	}

	libffmpeg::AVCodec *audioCodec;
	audioCodec = libffmpeg::avcodec_find_encoder(libffmpeg::AV_CODEC_ID_AAC);
	if (audioCodec == NULL)
	{
		LOG(ERROR) << "Couldn't find the encoder for AAC";
		// BUG FIX: previously execution fell through with audioCodec == NULL,
		// deferring the failure to avcodec_open2() far from the real cause.
		return false;
	}

	recordingContext->audioStream = libffmpeg::avformat_new_stream(recordingContext->formatContext, audioCodec);
	if (!recordingContext->audioStream)
	{
		LOG(ERROR) << "Couldn't create the audio stream";
		return false;
	}

	// Configure the (deprecated but still-functional) stream codec context...
	recordingContext->audioStream->codec->bit_rate = 64000;
	recordingContext->audioStream->codec->sample_fmt = libffmpeg::AV_SAMPLE_FMT_FLTP;
	recordingContext->audioStream->codec->sample_rate = 48000;
	recordingContext->audioStream->codec->channel_layout = AV_CH_LAYOUT_STEREO;
	recordingContext->audioStream->codec->channels = libffmpeg::av_get_channel_layout_nb_channels(recordingContext->audioStream->codec->channel_layout);

	// ...and mirror the same settings into codecpar, which the muxer reads.
	recordingContext->audioStream->codecpar->bit_rate = recordingContext->audioStream->codec->bit_rate;
	recordingContext->audioStream->codecpar->format = recordingContext->audioStream->codec->sample_fmt;
	recordingContext->audioStream->codecpar->sample_rate = recordingContext->audioStream->codec->sample_rate;
	recordingContext->audioStream->codecpar->channel_layout = recordingContext->audioStream->codec->channel_layout;
	recordingContext->audioStream->codecpar->channels = recordingContext->audioStream->codec->channels;

	result = libffmpeg::avcodec_open2(recordingContext->audioStream->codec, audioCodec, NULL);
	if (result < 0)
	{
		LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't open the audio codec");
		return false;
	}

	// Create the reusable frame the PortAudio callback fills with samples;
	// its geometry must match the encoder configuration above.
	recordingContext->audioFrame = libffmpeg::av_frame_alloc();
	if (!recordingContext->audioFrame)
	{
		LOG(ERROR) << "Couldn't allocate the output audio frame";
		return false;
	}
	recordingContext->audioFrame->nb_samples = recordingContext->audioStream->codec->frame_size;
	recordingContext->audioFrame->channel_layout = recordingContext->audioStream->codec->channel_layout;
	recordingContext->audioFrame->channels = recordingContext->audioStream->codec->channels;
	recordingContext->audioFrame->format = recordingContext->audioStream->codec->sample_fmt;
	recordingContext->audioFrame->sample_rate = recordingContext->audioStream->codec->sample_rate;

	result = libffmpeg::av_frame_get_buffer(recordingContext->audioFrame, 0);
	if (result < 0)
	{
		LOG(ERROR) << "Couldn't initialize the output audio frame buffer";
		return false;
	}

	// Some container formats want codec headers stored globally rather than
	// repeated in-band.
	if (!strcmp(recordingContext->formatContext->oformat->name, "mp4") || !strcmp(recordingContext->formatContext->oformat->name, "mov") || !strcmp(recordingContext->formatContext->oformat->name, "3gp"))
	{
		recordingContext->audioStream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
	}

	// Open the output file unless the format needs no file (AVFMT_NOFILE).
	if (!(recordingContext->formatContext->oformat->flags & AVFMT_NOFILE))
	{
		result = libffmpeg::avio_open(&recordingContext->formatContext->pb, recordingContext->formatContext->filename, AVIO_FLAG_WRITE);
		if (result < 0)
		{
			LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't open the output file");
			return false;
		}
	}

	// Write the container headers.
	result = libffmpeg::avformat_write_header(recordingContext->formatContext, NULL);
	if (result < 0)
	{
		LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't write the headers to the file");
		return false;
	}

	return true;
}
// Flushes and tears down the recording session: writes the container trailer,
// closes every stream codec, closes the output file and frees the format
// context plus the reusable audio frame. Returns false (after logging) on the
// first failure, leaving later cleanup steps undone.
static bool FinalizeRecordingContext(RecordingContext* recordingContext)
{
int result = 0;
// Write the trailing information (only if an I/O context was ever opened).
if (recordingContext->formatContext->pb)
{
result = libffmpeg::av_write_trailer(recordingContext->formatContext);
if (result < 0)
{
LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't write the trailer information");
return false;
}
}
// Close all the codecs (here: just the single audio stream).
for (int i = 0; i < (int)recordingContext->formatContext->nb_streams; i++)
{
result = libffmpeg::avcodec_close(recordingContext->formatContext->streams[i]->codec);
if (result < 0)
{
LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't close the codec");
return false;
}
}
// Close the output file, unless the format never opened one.
if (recordingContext->formatContext->pb)
{
if (!(recordingContext->formatContext->oformat->flags & AVFMT_NOFILE))
{
result = libffmpeg::avio_close(recordingContext->formatContext->pb);
if (result < 0)
{
LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't close the output file");
return false;
}
}
}
// Free the format context and all of its data; the stream pointer belonged
// to the context, so it is only nulled here, never freed directly.
libffmpeg::avformat_free_context(recordingContext->formatContext);
recordingContext->formatContext = NULL;
recordingContext->audioStream = NULL;
if (recordingContext->audioFrame)
{
libffmpeg::av_frame_free(&recordingContext->audioFrame);
// NOTE: av_frame_free() already sets the pointer to NULL; the assignment
// below is harmless but redundant.
recordingContext->audioFrame = NULL;
}
return true;
}
// Smoke test: records five seconds from the default input device (48 kHz,
// stereo, interleaved float32, 1024-frame buffers) into the hard-coded
// test.mp4 via AudioRecordCallback. Always returns 0.
// NOTE(review): failures below are only logged — execution continues with a
// possibly-invalid stream — and Pa_Terminate() is never called to balance
// Pa_Initialize(); confirm whether that is acceptable for this test harness.
int TestRecording::Test()
{
PaError result = paNoError;
result = Pa_Initialize();
if (result != paNoError) LOGINT_WITH_MESSAGE(ERROR, result, "Error initializing audio device framework");
RecordingContext recordingContext;
if (!InitializeRecordingContext(&recordingContext))
{
LOG(ERROR) << "Couldn't start recording file";
return 0;
}
// Capture from the default input device, two channels of float32 samples.
auto defaultDevice = Pa_GetDefaultInputDevice();
auto deviceInfo = Pa_GetDeviceInfo(defaultDevice);
PaStreamParameters inputParameters;
inputParameters.device = defaultDevice;
inputParameters.channelCount = 2;
inputParameters.sampleFormat = paFloat32;
inputParameters.suggestedLatency = deviceInfo->defaultLowInputLatency;
inputParameters.hostApiSpecificStreamInfo = NULL;
// Input-only stream (no output parameters): 48 kHz, 1024 frames/callback,
// with the RecordingContext passed through as the callback's user data.
PaStream* stream = NULL;
result = Pa_OpenStream(
&stream,
&inputParameters,
NULL,
48000,
1024,
paClipOff,
AudioRecordCallback,
&recordingContext);
if (result != paNoError)LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't open the audio stream");
result = Pa_StartStream(stream);
if (result != paNoError)LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't start the audio stream");
// Record for five seconds while the callback encodes in the background.
Sleep(1000 * 5);
result = Pa_StopStream(stream);
if (result != paNoError)LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't stop the audio stream");
if (!FinalizeRecordingContext(&recordingContext)) LOG(ERROR) << "Couldn't stop recording file";
result = Pa_CloseStream(stream);
if (result != paNoError)LOGINT_WITH_MESSAGE(ERROR, result, "Couldn't stop the audio stream");
return 0;
}
Here is the stdout, in case it helps: https://gist.github.com/pauldotknopf/9f24a604ce1f8a081aa68da1bf169e98
Why is the audio lower in pitch ? I assume I am overlooking a parameter that needs to be configured between PortAudio and FFMPEG. Is there something super obvious that I am missing ?
-
Anomalie #3921 : verifier url site
23 mars 2017, par jluc — Oui, super ! ça marche bien avec demo.spip.net
ou bien au choix :
Oui, désolé ! excusez le dérangement...
-
Progress Bar findviewbyid returns null
1er avril 2017, par ziad kiwan — Help me please it’s getting frustrating !
i’m doing a chat application where i’m adding views to a linear layout programmatically each time a user press the send button the code is below :
// Builds one chat-bubble view for a message (left layout for the friend,
// right layout for the local user), wires up its text/image/progress/status
// widgets according to the message type and download state, then appends it
// to the message list and scrolls to the bottom on the UI thread.
// `id` is the database row id (also used as the view id); `type == 3` marks a
// video that is still being compressed/uploaded.
public void appendToMessageHistory(String id,String uname, String messa,String messageType, final String filepath, String DownloadStatus,boolean internet,int type) {
//TextView tv=new TextView(Messaging.this);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(Width, LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(0, 10, 0, 0);
View v = getLayoutInflater().inflate(R.layout.message_entity, null);
TextView mess;
TextView time;
final ImageView iv;
final ProgressBar progressBar;
ImageView statusiv;
// Friend's message -> left bubble layout; own message -> right-aligned layout.
// The row id is stored as the bubble's view id for later lookup.
if (friend.userName.equals(uname)) {
v = getLayoutInflater().inflate(R.layout.message_entity, null);
v.setId(Integer.parseInt(id));
mess = (TextView) v.findViewById(R.id.message_entity_message);
time = (TextView) v.findViewById(R.id.message_entity_time);
iv = (ImageView) v.findViewById(R.id.message_entity_imageview);
statusiv = (ImageView) v.findViewById(R.id.imageView);
progressBar = (ProgressBar) v.findViewById(R.id.message_entity_progressbar);
} else {
v = getLayoutInflater().inflate(R.layout.message_entity_right, null);
lp.gravity = Gravity.RIGHT;
v.setId(Integer.parseInt(id));
mess = (TextView) v.findViewById(R.id.message_entity_right_message);
time = (TextView) v.findViewById(R.id.message_entity_right_time);
iv = (ImageView) v.findViewById(R.id.message_entity_right_imageview);
statusiv = (ImageView)v.findViewById(R.id.imageViewleft);
progressBar = (ProgressBar) v.findViewById(R.id.message_entity_Right_progressbar);
}
// NOTE(review): overwriting the ProgressBar's XML id with the database row
// id means any later findViewById(R.id.message_entity_Right_progressbar) on
// this row returns null — that id no longer exists in the row's view tree.
// This is the cause of the NPE reported in the FFmpeg onSuccess callback
// below; look the bar up by the row id instead, or stop reassigning its id.
try {
progressBar.setId(Integer.parseInt(id));
} catch (Exception e) {
e.printStackTrace();
}
// Dispatch on message type: picture, video (with download state), or text.
if (messageType != null) {
if (messageType.equals(MessageInfo.MESSAGE_TYPE_PIC)) {
// Picture message: show the bitmap, hide the progress bar, open the
// image in an external viewer on tap.
iv.setImageBitmap(ImageHandlet.GetBitmapFromPath(filepath));
progressBar.setVisibility(View.GONE);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent();
intent.setAction(Intent.ACTION_VIEW);
intent.setDataAndType(Uri.parse("file://" + filepath), "image/png");
startActivity(intent);
Log.d("FIlePAth", filepath);
}
});
} else if (messageType.equals(MessageInfo.MESSAGE_TYPE_VIDEO)) {
if (DownloadStatus.equals(LocalStorageHandler.DOWNLOADED)) {
// Own outgoing video (non-friend sender): type 3 means compression
// is still running, so only the progress bar is shown.
if(!(friend.userName.equals(uname))) {
if (type != 3) {
Bitmap bitTh = ThumbnailUtils.createVideoThumbnail(filepath, MediaStore.Images.Thumbnails.MINI_KIND);
iv.setImageBitmap(bitTh);
progressBar.setVisibility(View.GONE);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(filepath));
intent.setDataAndType(Uri.parse(filepath), "video/mp4");
startActivity(intent);
}
});
} else {
Log.w("Zipping","Progress Bar");
progressBar.setVisibility(View.VISIBLE);
}
} else {
// Friend's already-downloaded video: thumbnail + external player.
Bitmap bitTh = ThumbnailUtils.createVideoThumbnail(filepath, MediaStore.Images.Thumbnails.MINI_KIND);
iv.setImageBitmap(bitTh);
progressBar.setVisibility(View.GONE);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(filepath));
intent.setDataAndType(Uri.parse(filepath), "video/mp4");
startActivity(intent);
}
});
}
} else if (DownloadStatus.equals(LocalStorageHandler.NotDOWNLOADED)) {
// Not yet downloaded: show a download icon; tapping starts the
// download after resolving the stored file path from the local DB.
iv.setTag(id);
iv.setImageResource(R.drawable.download);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
progressBar.setVisibility(View.VISIBLE);
Cursor c = localstoragehandler.getIDnfo(iv.getTag().toString());
String filepath = "";
//Toast.makeText(getApplication(),iv.getTag().toString()+" iD " +iv.getId(),Toast.LENGTH_SHORT).show();
while (c.moveToNext()) {
String msg0 = c.getString(0);
String msg2 = c.getString(2);
String msg3 = c.getString(3);
String msg4 = c.getString(4);
String msg5 = c.getString(5);
String msg6 = c.getString(6);
filepath = msg5;
Log.d("-----------Vedio-----", "------------------");
Log.d("DATABASE---------", msg0);
Log.d("DATABASE-------", msg2);
Log.d("DATABASE---------", msg3);
Log.d("DATABASE-----", msg4 + "");
Log.d("DATABASE-------", msg5 + "");
Log.d("DATABASE----------", msg6 + "");
Log.d("--------END-------", "-------END-----------");
}
Toast.makeText(getApplicationContext(), filepath.toString() + iv.getTag().toString(), Toast.LENGTH_SHORT).show();
DownloadFileFromURL downloadFileFromURL = new DownloadFileFromURL(filepath, iv.getTag().toString());
downloadFileFromURL.execute("");
}
});
}
} else {
// Plain text message: no image or progress widgets needed.
iv.setVisibility(View.GONE);
progressBar.setVisibility(View.GONE);
}
}
// NOTE(review): messageType is dereferenced here WITHOUT the null guard used
// above — this throws a NullPointerException when messageType == null;
// confirm callers never pass null, or reuse the guard.
if(!(messageType.equals(MessageInfo.MESSAGE_TYPE_VIDEO))) {
if (!internet) {
statusiv.setImageResource(R.drawable.noconnectionl);
}
}
mess.setText(messa);
//time.setText(sendt);
v.setLayoutParams(lp);
final View lastview = v;
// Views must be touched on the UI thread; this method may be called from a
// background (network) thread.
runOnUiThread(new Runnable() {
@Override
public void run() {
mEssageBox.addView(lastview);
}
});
// Scroll to the newest message and return focus to the input box.
scrollView.post(new Runnable() {
@Override
public void run() {
scrollView.fullScroll(View.FOCUS_DOWN);
messageText.requestFocus();
}
});
}and here is the XML of the dialogbox :
<?xml version="1.0" encoding="utf-8"?>
<relativelayout>
<imageview></imageview>
<relativelayout>
<textview></textview>
<textview></textview>
<imageview></imageview>
<progressbar style="?android:attr/progressBarStyleLarge"></progressbar>
</relativelayout>
</relativelayout>And here is the code to send the video message :
final String message = messageText.getText().toString();
final Long rowid = localstoragehandler.insert(imService.getUsername(), friend.userName, message, MessageInfo.MESSAGE_TYPE_VIDEO, ZippedVideoPath, LocalStorageHandler.DOWNLOADED); //insert data into db and get to chat_id
appendToMessageHistory(rowid + "", imService.getUsername(), message, MessageInfo.MESSAGE_TYPE_VIDEO, ZippedVideoPath, LocalStorageHandler.DOWNLOADED, internet,3);// append the video with type 3 to just display the progress
messageText.setText("Video");
String newstr = null;
// intialize the asynctask to upload the video
// Background task that uploads the (already FFmpeg-compressed) video through
// imService. The constructor keeps a reference to the message's bubble view
// so the currently commented-out onPostExecute code could update it once the
// upload finishes.
class UploadVideo extends AsyncTask {
ProgressDialog uploading; // unused while the ProgressDialog line below stays commented out
View V; // chat-bubble view of the message being uploaded
UploadVideo(View v){
V = v;
}
@Override
protected void onPreExecute() {
super.onPreExecute();
// uploading = ProgressDialog.show(Messaging.this, "Uploading File", "Please wait...", false, false);
}
@Override
protected String doInBackground(Void... params) {
// Blocking network upload, kept off the UI thread by AsyncTask.
return imService.sendVideoMessage(imService.getUsername(), friend.userName, message, MessageInfo.MESSAGE_TYPE_VIDEO, "", ZippedVideoPath);
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
// RelativeLayout v = (RelativeLayout) V.findViewById(rowid.intValue());
// Log.w("Zipping",v.toString());
// ProgressBar progressBar1 = (ProgressBar) V.findViewById(R.id.message_entity_Right_progressbar);
// Log.w("Zipping",progressBar1.toString());
// ImageView iv = (ImageView) v.findViewById(R.id.message_entity_right_imageview);
// progressBar1.setVisibility(View.GONE);
// Log.d("POST Execute", s + "");
//textViewResponse.setText(Html.fromHtml("<b>Uploaded at <a href="http://stackoverflow.com/feeds/tag/" + s + "">" + s + "</a></b>"));
//textViewResponse.setMovementMethod(LinkMovementMethod.getInstance());
// here i am preparing the path so when the compress is done to save into.
}
}
if (null != absolute && absolute.length() > 0) {
int endIndex = absolute.lastIndexOf("/");
if (endIndex != -1) {
newstr = absolute.substring(endIndex, absolute.length()); // not forgot to put check if(endIndex != -1)
}
}
Log.w("Zipping", newstr);
final File videosdir = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/videos/");
if (!videosdir.exists()) {
videosdir.mkdirs();
}
ZippedVideoPath = videosdir.getAbsolutePath() + "/" + newstr;
Log.w("Zipping", ZippedVideoPath);
// here i got the ffmpeg library from the net to compress the video and i'm preparing the commands
String[] complexCommand = {"-y", "-i", absolute, "-strict", "experimental", "-r", "25", "-vcodec", "mpeg4", "-b:a", "150k", "-ab", "48000", "-ac", "2", "-ar", "22050", ZippedVideoPath};
FFmpeg ffmpeg = FFmpeg.getInstance(getBaseContext());
try {
// to execute "ffmpeg -version" command you just need to pass "-version"
ffmpeg.execute(complexCommand, new ExecuteBinaryResponseHandler() {
@Override
public void onStart() {
Log.w("Zipping", "started");
}
@Override
public void onProgress(String message) {
}
@Override
public void onFailure(String message) {
Log.w("Zipping", message);
}
@Override
public void onSuccess(String message1) {
Log.w("Zipping","Success");
RelativeLayout v = (RelativeLayout) mEssageBox.findViewById(rowid.intValue());
Log.w("VideoView",v.toString());
ProgressBar progressBar = (ProgressBar) v.findViewById(R.id.message_entity_Right_progressbar);
ImageView iv = (ImageView) v.findViewById(R.id.message_entity_right_imageview);
// progressBar.setVisibility(View.GONE);
Bitmap bitTh = ThumbnailUtils.createVideoThumbnail(ZippedVideoPath, MediaStore.Images.Thumbnails.MINI_KIND);
iv.setImageBitmap(bitTh);
progressBar.setVisibility(View.GONE);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(ZippedVideoPath));
intent.setDataAndType(Uri.parse(ZippedVideoPath), "video/mp4");
startActivity(intent);
}
});
UploadVideo uv = new UploadVideo(v);
uv.execute();
}
@Override
public void onFinish() {
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
Log.w("Zipping", e.toString());
}
}okay now to the problem when i call the findviewbyid for the progress bar in sendvideomessage ffmpeg onsuccess callback it returns null even though the findviewbyid for the the imageview which is in the same layout it returns the view ! so please tell me what is the problem
// FFmpeg success callback (the snippet the question asks about): looks the
// message's bubble up by its database row id and swaps the progress bar for
// the compressed video's thumbnail, then starts the upload task.
// NOTE(review): findViewById(R.id.message_entity_Right_progressbar) returns
// null because appendToMessageHistory() called progressBar.setId(rowid),
// replacing the XML id this lookup searches for — fetch the bar via
// v.findViewById(rowid.intValue()) (mind the duplicate id on the container)
// or stop overwriting the ProgressBar's id.
public void onSuccess(String message1) {
Log.w("Zipping","Success");
RelativeLayout v = (RelativeLayout) mEssageBox.findViewById(rowid.intValue()); ;
ProgressBar progressBar = (ProgressBar) v.findViewById(R.id.message_entity_Right_progressbar); // this keeps returning null!
ImageView iv = (ImageView) v.findViewById(R.id.message_entity_right_imageview); // returns the view
// progressBar.setVisibility(View.GONE);
Bitmap bitTh = ThumbnailUtils.createVideoThumbnail(ZippedVideoPath, MediaStore.Images.Thumbnails.MINI_KIND);
iv.setImageBitmap(bitTh);
progressBar.setVisibility(View.GONE);
iv.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(ZippedVideoPath));
intent.setDataAndType(Uri.parse(ZippedVideoPath), "video/mp4");
startActivity(intent);
}
});
UploadVideo uv = new UploadVideo(v);
uv.execute();
}Thank you !