
Recherche avancée
Médias (1)
-
Collections - Formulaire de création rapide
19 février 2013, par
Mis à jour : Février 2013
Langue : français
Type : Image
Autres articles (72)
-
MediaSPIP version 0.1 Beta
16 avril 2011, par
MediaSPIP 0.1 beta est la première version de MediaSPIP décrétée comme "utilisable".
Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
Pour avoir une installation fonctionnelle, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...) -
MediaSPIP 0.1 Beta version
25 avril 2011, par
MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...) -
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par
Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond.
Sur d’autres sites (10335)
-
how to play audio from a video file in c#
10 août 2014, par Ivan Lisovich
To read the video file I use the ffmpeg libraries (http://ffmpeg.zeranoe.com/builds/), build ffmpeg-2.2.3-win32-dev.7z.
// NOTE(review): this listing was recovered from an HTML page — the
// extraction stripped the generic type arguments from the two List^
// parameters and from the casts on the Scan0 line below; restore them
// from the original source before compiling.
//
// Reads the whole media file: decoded video frames are appended to
// imageData as Bitmaps, decoded audio buffers are appended to audioData.
manage c++ code for read video file :void VideoFileReader::Read( String^ fileName, System::Collections::Generic::List^ imageData, System::Collections::Generic::List^>^ audioData )
{
// Convert the managed file name into a native C string for FFmpeg.
char *nativeFileName = ManagedStringToUnmanagedUTF8Char(fileName);
libffmpeg::AVFormatContext *pFormatCtx = NULL;
libffmpeg::AVCodec *pCodec = NULL;
libffmpeg::AVCodec *aCodec = NULL;
// Register all available formats and codecs (required before open).
libffmpeg::av_register_all();
if(libffmpeg::avformat_open_input(&pFormatCtx, nativeFileName, NULL, NULL) != 0)
{
throw gcnew System::Exception( "Couldn't open file" );
}
// Probe the streams so that codec parameters get filled in.
if(libffmpeg::avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
throw gcnew System::Exception( "Couldn't find stream information" );
}
// Dump container info to stderr (debug aid only).
libffmpeg::av_dump_format(pFormatCtx, 0, nativeFileName, 0);
// Pick the best video and audio streams plus their decoders.
// NOTE(review): av_find_best_stream returns a negative AVERROR code on
// failure, not necessarily -1 — the two `== -1` checks below may miss
// other error values; comparing with `< 0` would be safer.
int videoStream = libffmpeg::av_find_best_stream(pFormatCtx, libffmpeg::AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0);
int audioStream = libffmpeg::av_find_best_stream(pFormatCtx, libffmpeg::AVMEDIA_TYPE_AUDIO, -1, -1, &aCodec, 0);
if(videoStream == -1)
{
throw gcnew System::Exception( "Didn't find a video stream" );
}
if(audioStream == -1)
{
throw gcnew System::Exception( "Didn't find a audio stream" );
}
// Open the audio decoder.
// NOTE(review): unlike the video path below, the return value of
// avcodec_open2 is not checked here.
libffmpeg::AVCodecContext *aCodecCtx = pFormatCtx->streams[audioStream]->codec;
libffmpeg::avcodec_open2(aCodecCtx, aCodec, NULL);
m_channels = aCodecCtx->channels;
m_sampleRate = aCodecCtx->sample_rate;
// NOTE(review): bits_per_coded_sample describes the *coded* stream and is
// often 0 or unrelated to the decoded PCM width — if the playback side
// builds its wave format from this, that alone can corrupt the sound.
m_bitsPerSample = aCodecCtx->bits_per_coded_sample;
// Open the video decoder.
libffmpeg::AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;
if(libffmpeg::avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
throw gcnew System::Exception( "Could not open codec" );
}
m_width = pCodecCtx->width;
m_height = pCodecCtx->height;
m_framesCount = pFormatCtx->streams[videoStream]->nb_frames;
// Derive an integer frame rate, falling back to 25 fps when unknown.
if (pFormatCtx->streams[videoStream]->r_frame_rate.den == 0)
{
m_frameRate = 25;
}
else
{
m_frameRate = pFormatCtx->streams[videoStream]->r_frame_rate.num / pFormatCtx->streams[videoStream]->r_frame_rate.den;
if (m_frameRate == 0)
{
m_frameRate = 25;
}
}
// One reusable frame, shared by the video and audio decode paths.
libffmpeg::AVFrame *pFrame = libffmpeg::av_frame_alloc();
// NOTE(review): `buffer` is allocated but never attached to a picture
// (no avpicture_fill) and never read — it is dead memory; sws_scale
// writes directly into the locked Bitmap instead.
int numBytes = libffmpeg::avpicture_get_size(libffmpeg::PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
libffmpeg::uint8_t *buffer = (libffmpeg::uint8_t *)libffmpeg::av_malloc(numBytes*sizeof(libffmpeg::uint8_t));
// Converter from the decoder's native pixel format to packed RGB24.
struct libffmpeg::SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, libffmpeg::PIX_FMT_RGB24, SWS_BILINEAR, NULL, NULL, NULL);
libffmpeg::AVPacket packet;
// NOTE(review): filt_frame is allocated but never used and never freed.
libffmpeg::AVFrame *filt_frame = libffmpeg::av_frame_alloc();
// Demux packet by packet until end of file.
while(av_read_frame(pFormatCtx, &packet) >= 0)
{
if(packet.stream_index == videoStream)
{
System::Drawing::Bitmap ^bitmap = nullptr;
int frameFinished;
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if(frameFinished)
{
bitmap = gcnew System::Drawing::Bitmap( pCodecCtx->width, pCodecCtx->height, System::Drawing::Imaging::PixelFormat::Format24bppRgb );
// NOTE(review): the bitmap is locked ReadOnly yet written to by
// sws_scale below; it should be locked WriteOnly or ReadWrite.
System::Drawing::Imaging::BitmapData^ bitmapData = bitmap->LockBits( System::Drawing::Rectangle( 0, 0, pCodecCtx->width, pCodecCtx->height ), System::Drawing::Imaging::ImageLockMode::ReadOnly, System::Drawing::Imaging::PixelFormat::Format24bppRgb );
libffmpeg::uint8_t* ptr = reinterpret_cast( static_cast( bitmapData->Scan0 ) );
// Despite the "src" names, these describe the *destination* of the
// conversion: the locked bitmap plane and its stride.
libffmpeg::uint8_t* srcData[4] = { ptr, NULL, NULL, NULL };
int srcLinesize[4] = { bitmapData->Stride, 0, 0, 0 };
libffmpeg::sws_scale( sws_ctx, (libffmpeg::uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, srcData, srcLinesize );
bitmap->UnlockBits( bitmapData );
}
// NOTE(review): when frameFinished is 0 a nullptr is appended here,
// so imageData can contain null entries the caller must expect.
imageData->Add(bitmap);
}
else if(packet.stream_index == audioStream)
{
int b = av_dup_packet(&packet);
if(b >= 0) {
int audio_pkt_size = packet.size;
libffmpeg::uint8_t* audio_pkt_data = packet.data;
while(audio_pkt_size > 0)
{
int got_frame = 0;
// NOTE(review): the loop advances audio_pkt_data/audio_pkt_size
// but keeps feeding the *original* packet, so a packet holding
// several audio frames is decoded from its start on every pass.
int len1 = libffmpeg::avcodec_decode_audio4(aCodecCtx, pFrame, &got_frame, &packet);
if(len1 < 0)
{
audio_pkt_size = 0;
break;
}
audio_pkt_data += len1;
audio_pkt_size -= len1;
if (got_frame)
{
// Size of the full decoded frame (all channels) in sample_fmt.
int data_size = libffmpeg::av_samples_get_buffer_size ( NULL, aCodecCtx->channels, pFrame->nb_samples, aCodecCtx->sample_fmt, 1 );
array<byte>^ managedBuf = gcnew array<byte>(data_size);
// NOTE(review): only data[0] is copied. For planar sample formats
// (common output of avcodec_decode_audio4) data[0] holds just the
// first channel's plane, and the samples may be float rather than
// 16-bit PCM — playing such a buffer as 16-bit interleaved PCM
// would produce exactly the crackling described; convert with
// libswresample first. TODO confirm aCodecCtx->sample_fmt.
System::IntPtr iptr = System::IntPtr( pFrame->data[0] );
System::Runtime::InteropServices::Marshal::Copy( iptr, managedBuf, 0, data_size );
audioData->Add(managedBuf);
}
}
}
}
libffmpeg::av_free_packet(&packet);
}
// Cleanup.
// NOTE(review): sws_ctx (sws_freeContext), filt_frame and the audio codec
// context are never released here.
libffmpeg::av_free(buffer);
libffmpeg::av_free(pFrame);
libffmpeg::avcodec_close(pCodecCtx);
libffmpeg::avformat_close_input(&pFormatCtx);
delete [] nativeFileName;
}
This function returns my images in the imageData list and the audio in the audioData list.
I can draw the images from my C# code as normal, but I cannot play the audio data.
I tried playing the audio with the NAudio library, but I hear crackling from the speakers instead of sound.
The C# code playing the audio:
var WaveFormat = new WaveFormat(m_sampleRate, 16, m_channels);
// Buffered provider sized to the whole file's duration.
// NOTE(review): DiscardOnBufferOverflow = true silently drops samples
// whenever they are queued faster than they play, which by itself causes
// audible glitches — confirm the BufferDuration really covers the data.
var _waveProvider = new BufferedWaveProvider(WaveFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(_fileReader.Length) };
var _waveOut = new DirectSoundOut();
_waveOut.Init(_waveProvider);
_waveOut.Play();
// Push every decoded buffer straight into the provider.
// NOTE(review): this assumes the buffers hold 16-bit interleaved PCM
// matching WaveFormat, but avcodec_decode_audio4 frequently outputs
// planar float (see aCodecCtx->sample_fmt) — such a mismatch is the
// classic cause of crackling; resample/convert (e.g. libswresample)
// before queueing. TODO confirm the decoder's sample format.
foreach (var data in audioData)
{
_waveProvider.AddSamples(data, 0, data.Length);
}What am I doing wrong ?
-
Get RGB values from AVPicture and change to grey-scale in FFMPEG
22 octobre 2014, par user2742299
The main motive of my code is to change the RGB values from the AVPicture in FFMPEG.
I have been able to get the image data "data[0]" by following the article : http://blog.tomaka17.com/2012/03/libavcodeclibavformat-tutorial/
I would like to know how I can access the 3 bytes per pixel of pic.data[0], which is in RGB format. I have been trying to access pic.data[i][j] via a for-loop in 2D-matrix fashion, but each pixel spans 3 bytes, so indexing by j alone does not address a whole pixel.
Any guidance in this regard will be helpful.
Code is here :
AVPicture pic;
avpicture_alloc(&pic, PIX_FMT_RGB24, mpAVFrameInput->width,mpAVFrameInput->height);
auto ctxt = sws_getContext(mpAVFrameInput->width,mpAVFrameInput->height,static_cast<pixelformat>(mpAVFrameInput->format),
mpAVFrameInput->width, mpAVFrameInput->height, PIX_FMT_RGB24, SWS_BILINEAR, nullptr, nullptr, nullptr);
if (ctxt == nullptr)
throw std::runtime_error("Error while calling sws_getContext");
sws_scale(ctxt, mpAVFrameInput->data, mpAVFrameInput->linesize, 0, mpAVFrameInput->height, pic.data,
pic.linesize);
for (int i = 0; i < (mpAVFrameInput->height-1); i++) {
for (int j = 0; j < (mpAVFrameInput->width-1); j++) {
printf("\n value: %d",pic.data[0][j]);
}
}
The pseudo-code I have in mind is:
For each pixel in image {
Red = pic.data[i][j].pixel.RED;
Green = pic.data[i][j].pixel.GREEN;
Blue = pic.data[i][j].pixel.BLUE;
GRAY = (Red+Green+Blue)/3;
Red = GRAY;
Green = GRAY;
Blue = GRAY;
Save Frame;}
I am quite new to FFMPEG, so any guidance and help will be highly appreciated.
Many Thanks
-
MOOV atom is not being written to the output.
18 juillet 2014, par AnilJI am facing a problem where a MOOV atom is not written to the end of the file, and the file is not playable by the vlc player. Also, FFmpeg command gives me the following error.
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x2334ba0] moov atom not found
queueout/1000_wrecord.mp4: Invalid data found when processing input
In my design, I am using an object of the DataChunkQueue class to initialize the IContainer object, which writes into this queue when the IContainer.writePacket(packet) method is called. Finally, when the recording is complete, I flush this queue out into a file. However, when I try to play the file, the above error is thrown. When I test with the FLV file type, however, I am able to play the file correctly.
I am not sure what the issue is with the MP4 file and why the MOOV atom is not written at the end. Can anyone offer me any suggestions as to how this can be resolved?
I am pasting below some of the code snippets for reference.
/**
 * A ByteChannel that buffers written packets in a thread-safe in-memory
 * queue instead of a file, so the muxed stream can be drained later.
 *
 * NOTE(review): when the trailer is written, the MP4 muxer has to seek
 * back to the start of the output to patch in the moov atom; a queue
 * like this is not seekable, which is consistent with the reported
 * "moov atom not found" error (FLV, which works, never seeks back).
 * Consider a seekable channel, or a format such as FLV / fragmented MP4.
 */
public class DataChunkQueue implements ByteChannel {
    // FIX: the generic type arguments were mangled to a lowercase
    // "<datachunk>" tag by the HTML extraction; restored to the
    // DataChunk class actually used below.
    private ConcurrentLinkedQueue<DataChunk> mChunkQueue = null;
    private static String BASE_PATH = null;
    private static String mOutputFileName = null;
    private FileChannel mOutputFileChannel = null;

    // Constructor: start with an empty queue.
    public DataChunkQueue() {
        mChunkQueue = new ConcurrentLinkedQueue<DataChunk>();
    }

    @Override
    public void close() throws IOException {
        return;
    }

    @Override
    public boolean isOpen() {
        return true;
    }

    /**
     * Queues a copy of the buffer's contents.
     *
     * FIX: WritableByteChannel.write(...) must report how many bytes were
     * consumed; the old code returned 0 without consuming the buffer,
     * which tells callers nothing was written and can make them retry
     * forever. Report (and mark consumed) the full buffer.
     * NOTE(review): assumes DataChunk's constructor copies the buffer
     * contents — confirm against the DataChunk class.
     */
    @Override
    public int write(ByteBuffer buffer) throws IOException {
        int written = buffer.remaining();
        DataChunk vChunk = new DataChunk(buffer);
        buffer.position(buffer.limit()); // mark the bytes as consumed
        mChunkQueue.add(vChunk);
        return written;
    }

    /**
     * Drains the next queued chunk into the caller's buffer.
     *
     * FIX: poll() returns null on an empty queue, so the old code could
     * throw a NullPointerException; it also assigned to the parameter,
     * which never reaches the caller. Copy into the caller's buffer
     * instead and return the byte count (-1 when the queue is empty,
     * per the ReadableByteChannel contract).
     */
    @Override
    public int read(ByteBuffer buffer) throws IOException {
        DataChunk chunk = mChunkQueue.poll();
        if (chunk == null) {
            return -1;
        }
        ByteBuffer src = chunk.GetBuffer();
        int transferred = Math.min(buffer.remaining(), src.remaining());
        int oldLimit = src.limit();
        src.limit(src.position() + transferred);
        buffer.put(src);
        src.limit(oldLimit);
        // NOTE(review): chunk bytes that did not fit are dropped here;
        // re-queue the remainder if partial reads must be lossless.
        return transferred;
    }
}
/**
 * Opens the output container on top of an in-memory DataChunkQueue and
 * configures the video encoder (codec guessed from the record format,
 * fixed frame rate, GOP and dimensions), then writes the container header.
 *
 * @return true when the encoder and container are ready, false otherwise.
 * @throws RuntimeException when the container cannot be opened or no
 *         suitable codec can be guessed.
 */
private boolean InitStreamEncoder() {
    // BUG FIX: mOutQueue was declared null and never constructed, so the
    // container below was opened on a null channel and could never work.
    // NOTE(review): if other code must drain this queue later, it should
    // probably be a field rather than a local — confirm against the
    // rest of the class.
    DataChunkQueue mOutQueue = new DataChunkQueue();
    // Change this to change the frame rate you record at.
    mFrameRate = IRational.make(Constants.FRAME_RATE, 1);
    // Try opening a container format of the requested type.
    mOutFormat = IContainerFormat.make();
    mOutFormat.setOutputFormat(mRecordFormat, null, null);
    // Initialize the output container on the in-memory channel.
    mOutputContainer = IContainer.make();
    int retval = mOutputContainer.open(mOutQueue, IContainer.Type.WRITE, mOutFormat);
    if (retval < 0)
        throw new RuntimeException("could not open data output stream buffer");
    // Guess the encoding codec based on the type of output file.
    ICodec videoCodec = ICodec.guessEncodingCodec(null, null, ("out." + mRecordFormat), null, ICodec.Type.CODEC_TYPE_VIDEO);
    if (videoCodec == null)
        throw new RuntimeException("could not guess a codec");
    // Initialize the encoding parameters.
    mOutStream = mOutputContainer.addNewStream(videoCodec);
    mOutStreamCoder = mOutStream.getStreamCoder();
    mOutStreamCoder.setNumPicturesInGroupOfPictures(Constants.GOP);
    mOutStreamCoder.setCodec(videoCodec);
    //mOutStreamCoder.setBitRate(Constants.BITRATE);
    //mOutStreamCoder.setBitRateTolerance(Constants.TOLERANCE);
    mOutStreamCoder.setPixelType(IPixelFormat.Type.YUV420P);
    mOutStreamCoder.setWidth(Constants.MAIN_SCREEN_WIDTH);
    mOutStreamCoder.setHeight(Constants.MAIN_SCREEN_HEIGHT);
    //mOutStreamCoder.setFlag(IStreamCoder.Flags.FLAG_QSCALE, true);
    //mOutStreamCoder.setGlobalQuality(0);
    mOutStreamCoder.setFrameRate(mFrameRate);
    // The time base is the inverse of the frame rate.
    mOutStreamCoder.setTimeBase(IRational.make(mFrameRate.getDenominator(), mFrameRate.getNumerator()));
    retval = mOutStreamCoder.open(null, null);
    if (retval < 0) {
        // FIX: the old message said "input decoder" although this is the
        // output encoder being opened.
        System.out.println("could not open output encoder");
        return false;
    }
    // Write the container header. The matching writeTrailer() call is
    // what emits the MP4 moov atom — and the MP4 muxer needs a seekable
    // output to write it; an in-memory queue channel is not seekable,
    // which matches the reported "moov atom not found" error.
    retval = mOutputContainer.writeHeader();
    if (retval < 0) {
        System.out.println("could not write file header");
        return false;
    }
    return true;
}
This function is called at the very end to write the trailer.
// Finalizes the recording: flushes any buffered packets, writes the
// container trailer, and releases the container and the stream coder.
// Statement order matters here: writeTrailer() must run before close(),
// and for MP4 it is typically the step that emits the moov atom — which
// requires a seekable output channel (an in-memory queue is not
// seekable, consistent with the "moov atom not found" error reported).
public void Cleanup() {
if (mOutputContainer != null) {
// Flush any packets still buffered in the coders/container.
mOutputContainer.flushPackets();
// Write the trailer (moov atom for MP4), then release the container.
mOutputContainer.writeTrailer();
mOutputContainer.close();
}
if (mOutStreamCoder != null) {
mOutStreamCoder.close();
mOutStreamCoder = null;
}
}