
Media (1)
-
SWFUpload Process
6 September 2011
Updated: September 2011
Language: French
Type: Text
Other articles (103)
-
Publishing on MediaSPIP
13 June 2013
Can I post content from an iPad tablet?
Yes, if your MediaSPIP installation is at version 0.2 or higher. If needed, contact the administrator of your MediaSPIP to find out.
-
Use, discuss, criticize
13 April 2011
Talk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
A discussion list is available for all exchanges between users.
-
Libraries and binaries specific to video and audio processing
31 January 2010
The following software and libraries are used by SPIPmotion in one way or another.
Required binaries: FFMpeg: the main encoder, which can transcode almost any type of video or audio file into formats readable on the Internet (see this tutorial for its installation); Oggz-tools: tools for inspecting ogg files; Mediainfo: retrieves information from most video and audio formats;
Optional, complementary binaries: flvtool2: (...)
On other sites (7464)
-
Compressed SWF format not supported/cws2fws issue
16 September 2011, by CodedMonkey
I am trying to convert a .swf file to a .flv file using ffmpeg. When I got that error, I searched around and found that I should use cws2fws to decompress the .swf file so that ffmpeg can convert it. However, it seems I need to compile cws2fws.c myself, and it requires so many other libraries and files that none of them are gathered on any one site. Is there a precompiled version of cws2fws available for download somewhere, or can I solve my initial error some other way?
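For what it's worth, the conversion cws2fws performs is small: a compressed SWF keeps its first 8 header bytes (the "CWS" signature, a version byte and the total uncompressed length) and stores everything after them as a zlib stream, so rewriting the signature to "FWS" and inflating the body yields a file ffmpeg can read. A rough self-contained sketch using only zlib (the function name and file paths below are made up for illustration; this is not the actual cws2fws source):

#include <cstdio>
#include <cstdint>
#include <vector>
#include <zlib.h>

// Decompress a "CWS" (compressed) SWF into an "FWS" (uncompressed) SWF.
// Header layout: bytes 0-2 signature, byte 3 version, bytes 4-7 total
// uncompressed length (little-endian); everything after byte 8 is a zlib stream.
bool cws_to_fws(const char *in_path, const char *out_path){
    FILE *in = fopen(in_path, "rb");
    if(!in) return false;
    unsigned char header[8];
    if(fread(header, 1, 8, in) != 8 || header[0] != 'C'){ fclose(in); return false; }
    header[0] = 'F'; // mark the output as uncompressed
    std::vector<unsigned char> comp;
    unsigned char chunk[4096];
    size_t n;
    while((n = fread(chunk, 1, sizeof(chunk), in)) > 0)
        comp.insert(comp.end(), chunk, chunk + n);
    fclose(in);
    // total uncompressed file length, minus the 8 header bytes already written
    uLongf body_len = (uLongf)(header[4] | header[5] << 8 | header[6] << 16 | ((uint32_t)header[7]) << 24) - 8;
    std::vector<unsigned char> body(body_len);
    if(uncompress(body.data(), &body_len, comp.data(), comp.size()) != Z_OK) return false;
    FILE *out = fopen(out_path, "wb");
    if(!out) return false;
    fwrite(header, 1, 8, out);
    fwrite(body.data(), 1, body_len, out);
    fclose(out);
    return true;
}

Linking only needs zlib (-lz), which avoids pulling in the rest of the ffmpeg tree.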
-
video streaming - mp4 file (x264) - and skip to location
14 March 2011, by Raoul
Previously I was using Apache to serve .flv files, which an embedded player on an intranet page was playing.
Is it possible to stream mp4 files (x264 encoded) in a similar manner? Are there any open source scripts/solutions for doing proper streaming (e.g. letting people skip to a part of the video without having to download the parts they've skipped over)?
If anyone is doing something similar, I'd love to hear from them.
Thanks
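An aside that may help with the seeking part of the question: for plain HTTP pseudo-streaming, the mp4 "moov" atom has to sit at the start of the file so players can seek using range requests; ffmpeg can remux an existing file that way, for example (file names are placeholders):

ffmpeg -i input.mp4 -c copy -movflags +faststart output.mp4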
-
libavcodec ffmpeg huge memory leak
12 December 2016, by aleksandar kamenjasevic
I have trouble with ffmpeg and a memory leak.
I have a class for decoding video, and I make an instance of that class for every video file in my playlist.
While broadcasting a video file I have no memory leak; the leak only appears when I create an instance of the ffmpeg class. I played a video file in a loop for 2 days without the memory increasing. This is my decoding code:
if(av_read_frame(pFormatCtx,&packet)>=0){
if(packet.stream_index==videoStream){
if(first_dts_video==AV_NOPTS_VALUE){
first_dts_video=packet.pts;
}
avcodec_decode_video2(pCodecCtx,pFrameOrg,&frameFinished,&packet);
if(frameFinished){
if(first){
first=false;
buffer=(uint8_t*)malloc(iNumBytes*sizeof(uint8_t));
avpicture_fill((AVPicture*)pFrameRGB,buffer,PIX_FMT_RGB24,oWidth,oHeight);
sws_scale(pSwsCtx,(uint8_t const * const *)pFrameOrg->data,pFrameOrg->linesize,0,pCodecCtx->height,pFrameRGB->data,pFrameRGB->linesize);
m_file->fillImage(pFrameRGB->data[0],pFrameRGB->linesize[0],iNumBytes);
m_file->ready=true;
m_file->wait.notify_all();
av_frame_free(&pFrameRGB);
if(buffer!=NULL)
free(buffer);
}
buf=(videoBuff*)malloc(sizeof(videoBuff));
if(buf==NULL){
threadRunning=false;
continue;
}
buf->buffer=malloc(bytes*sizeof(uint8_t));
if(buf->buffer==NULL){
threadRunning=false;
continue;
}
//qDebug()<<...;
if(m_file->isAvi()&&pCodecCtx->pix_fmt==PIX_FMT_YUV420P){
av_buffersrc_add_frame(filter_src_ctx,pFrameOrg);//filter
while (1) {//filter
int ret = av_buffersink_get_frame(filter_sink_ctx, filter_frame);
if (ret == AVERROR(EAGAIN)){
// qDebug()<<"EAGAIN";
break;
}else if( ret == AVERROR_EOF){
// qDebug()<<"AVERROR_EOF";
break;
}
if (ret < 0){
// qDebug()<<...;
break;
}
sws_scale(pSwsCtxUYVY422,(uint8_t const * const *)filter_frame->data,filter_frame->linesize,0,pCodecCtx->height,pFrameUYVY422->data,pFrameUYVY422->linesize);// the old version used pFrameOrg here
av_frame_unref(filter_frame);
}
av_frame_unref(filter_frame);// clean up the frame received from the filter
}else{
sws_scale(pSwsCtxUYVY422,(uint8_t const * const*)pFrameOrg->data,pFrameOrg->linesize,0,pCodecCtx->height,pFrameUYVY422->data,pFrameUYVY422->linesize);
}
buf->lineSize=pFrameUYVY422->linesize[0];
buf->bufSize=bytes;
memcpy(buf->buffer,pFrameUYVY422->data[0],bytes);
frameCount++;
m_file->putVideoBuf(buf);
av_packet_unref(&packet);
}else{
av_packet_unref(&packet);
qDebug()<<"OCE OCE";
if(syncVideo){
frameCount++;
m_file->putVideoBuf(NULL);
}
}
}else if(packet.stream_index==audioStream){
if(firstAudio){
firstAudio=false;
}
m1.lock();
audioPackets.append(packet);
m1.unlock();
}else{
av_packet_unref(&packet);
}
}

This is my destructor:
MYAVClass::~MYAVClass(){
m1.unlock();
this->m1.lock();
threadRunning=false;
if(t1!=NULL){
this->m1.unlock();
if(this->t1->joinable())
this->t1->join();
delete t1;
flush();
}
if(pFrameOrg!=NULL)
av_frame_free(&pFrameOrg);
if(pFrameRGB!=NULL)
av_frame_free(&pFrameRGB);
if(pFrameAudio!=NULL)
av_frame_free(&pFrameAudio);
if(pFrameUYVY422!=NULL)
av_frame_free(&pFrameUYVY422);
if(filter_frame!=NULL){
av_frame_free(&filter_frame);
filter_frame=NULL;
}
if(filter_audio_frame!=NULL){
av_frame_free(&filter_audio_frame);
filter_audio_frame=NULL;
}
if(filter_graph!=NULL){
avfilter_graph_free(&filter_graph);
filter_graph=NULL;
}
if(filter_a_graph!=NULL){
avfilter_graph_free(&filter_a_graph);
filter_a_graph=NULL;
}
if(pCodecCtx!=NULL){
avcodec_close(pCodecCtx);
avcodec_free_context(&pCodecCtx);
}
if(aCodecCtx!=NULL){
avcodec_close(aCodecCtx);
avcodec_free_context(&aCodecCtx);
}
if(pFormatCtx!=NULL)
avformat_close_input(&pFormatCtx);
if(audioBuffer!=NULL)
free(audioBuffer);
if(pSwsCtx!=NULL)
sws_freeContext(pSwsCtx);
if(pSwsCtxUYVY422!=NULL)
sws_freeContext(pSwsCtxUYVY422);
if(sws_ctx_audio!=NULL)
swr_free((SwrContext**)&sws_ctx_audio);
if(audioBuffRes!=NULL)
free(audioBuffRes);
if(audioTmpBuf!=NULL)
free(audioTmpBuf);
if(buffer_UYVY422!=NULL){
free(buffer_UYVY422);
}
}

I also notice that I have a massive memory leak when I use these functions:
if(avcodec_open2(pCodecCtx,pCodec,NULL)!=0){
QMessageBox::critical(NULL,"OPEN CODEC","SISTEMSKA GRESKA");
pFile->setHaveFileInfo(false);
return;
}

and after I decode the first video frame.
When I create 40 instances of this class and then delete all of them, 600 MB is left in memory.
I would appreciate your help. Thank you very much.
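As a general reference (not the poster's actual code): each instance has to pair every libav* allocation with its matching release. Using the newer AVCodecParameters API, a minimal open/teardown cycle for one file looks roughly like this, with the function name and 'path' assumed for illustration:

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}

// Sketch: one open/decode/close cycle for a single file. If any of the
// matching release calls is skipped, every new instance leaks its contexts.
void open_and_close_once(const char *path){
    AVFormatContext *fmt = NULL;
    if(avformat_open_input(&fmt, path, NULL, NULL) < 0) return;
    avformat_find_stream_info(fmt, NULL);
    int vs = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if(vs < 0){ avformat_close_input(&fmt); return; }
    AVCodec *dec = avcodec_find_decoder(fmt->streams[vs]->codecpar->codec_id);
    AVCodecContext *ctx = avcodec_alloc_context3(dec);
    avcodec_parameters_to_context(ctx, fmt->streams[vs]->codecpar);
    avcodec_open2(ctx, dec, NULL);
    // ... read packets, decode frames, unref each packet/frame after use ...
    avcodec_free_context(&ctx);   // releases what avcodec_alloc_context3/avcodec_open2 allocated
    avformat_close_input(&fmt);   // releases the demuxer side
}

Anything skipped in that pairing (or any packet/frame left unreferenced) accumulates once per instance.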
//EDIT
This is the place where I put my videoBuff*:

void VideoFile::putVideoBuf(videoBuff *buf){
if(buf==NULL&&!videoPackets.isEmpty()){
m1.lock();
qDebug()<<"NULL";
videoBuff *tmpBuf=new videoBuff;
tmpBuf->bufSize=videoPackets.last()->bufSize;
tmpBuf->lineSize=videoPackets.last()->lineSize;
tmpBuf->buffer=malloc(tmpBuf->bufSize);
memcpy(tmpBuf->buffer,videoPackets.last()->buffer,tmpBuf->bufSize);
videoPackets.append(tmpBuf);
m1.unlock();
}else if(buf!=NULL){
this->m1.lock();
videoPackets.append(buf);
this->m1.unlock();
}else{
qDebug()<<"-.-";
}
}

This is the function for pulling my videoBuff*:
videoBuff *VideoFile::getNextVideoBuf(){
if(this->getTvReady()){
if(videoPackets.size()>0&&audioPackets.size()>0){
this->m1.lock();
videoBuff *tmp=NULL;
if(!videoPackets.isEmpty())
tmp=videoPackets.first();
if(tmp==NULL){
this->m1.unlock();
return NULL;
}
if(!videoPackets.isEmpty())
videoPackets.removeFirst();
if(videoTaken>=*numberOfframes)
videoTaken=0;
videoTaken++;
//qDebug()<<...;
this->m1.unlock();
return tmp;
}else{
this->removeLastVideoFrames(videoPackets.size());
this->removeLastAudioFrames(audioPackets.size());
currentLive=NULL;
emit finish();
return NULL;
}
}else{
return NULL;
}
}

This is the place where I pull my videoBuff*:
bool DecklinkDevice::ScheduleNextVideoFrame(bool preroling){
iDecklinkOutput->CreateVideoFrame(currentOutput->getWidth(),currentOutput->getHeight(),currentOutput->getWidth()*bytesPerPix,videoFormat,bmdFrameFlagDefault,&m_videoFrame);
if(preroling==false){
if(running==false){
this->mutex.unlock();
return false;
}
}
if(currentOutput==NULL){
this->mutex.unlock();
return false;
}
timeCode=new MyTimeCode;
timeCode->setFps(currentOutput->getFramesPerSec());
void *buf;
m_videoFrame->GetBytes(&buf);
bool isLogo=false;
bool isTimeLogo=false;
VideoFile *f=nextVideoBuf->getVideoFile();
if(f!=NULL){
videoBuff *n_buf=nextVideoBuf->getVideoFile()->getNextVideoBuf();
isLogo=f->getLogo();
isTimeLogo=f->getTimeLogo();
timeCode->setVideoFile(f);
if(n_buf!=NULL){
m_videoFrame->SetTimecode(timeCodeFormat,timeCode);
if(lastVideoBuff!=NULL)
free(lastVideoBuff);
lastVideoBuff=malloc(n_buf->bufSize);
memcpy(buf,n_buf->buffer,n_buf->bufSize);
memcpy(lastVideoBuff,buf,n_buf->bufSize);
lastVideoBuffSize=n_buf->bufSize;
free(n_buf->buffer);
free(n_buf);
}else{
void *buf;
m_videoFrame->GetBytes(&buf);
timeCode->setLast();
memcpy(buf,lastVideoBuff,lastVideoBuffSize);
}
}else{
void *buf;
m_videoFrame->GetBytes(&buf);
if(lastVideoBuff==NULL){
lastVideoBuff=malloc(m_videoFrame->GetWidth()*m_videoFrame->GetHeight()*bytesPerPix);
memset(lastVideoBuff,100,m_videoFrame->GetWidth()*m_videoFrame->GetHeight()*bytesPerPix);
}
memcpy(buf,lastVideoBuff,m_videoFrame->GetWidth()*m_videoFrame->GetHeight()*bytesPerPix);
}
if(alphaSupport()){
int rowId=nextAudioBuf->getSettings()->getLogoPos().y();
uchar *ptr=(uchar*)buf;
if(isLogo){
uchar *pptr=logo->bits();
for(int i=0;i<logo->byteCount();i=i+4){
if(i%logo->bytesPerLine()==0){
rowId++;
}
if(pptr[i+3]!=255)
continue;
int offset=(i%logo->bytesPerLine()+nextVideoBuf->getSettings()->getLogoPos().x()*bytesPerPix);
ptr[offset+m_videoFrame->GetRowBytes()*rowId]=pptr[i+3];
ptr[offset+1+m_videoFrame->GetRowBytes()*rowId]=pptr[i+2];
ptr[offset+2+m_videoFrame->GetRowBytes()*rowId]=pptr[i+1];
ptr[offset+3+m_videoFrame->GetRowBytes()*rowId]=pptr[i];
}
}
if(isTimeLogo){
QImage im(m_videoFrame->GetWidth()/5,100,QImage::Format_ARGB32);
QPainter p(&im);
p.setRenderHints(QPainter::RenderHints(QPainter::SmoothPixmapTransform|QPainter::TextAntialiasing|QPainter::HighQualityAntialiasing|QPainter::TextAntialiasing|QPainter::Antialiasing));
p.fillRect(im.rect(),QColor(0,0,0));
QFont f;
f.setFamily(f.defaultFamily());
f.setPointSize(25);
p.setFont(f);
QPen pen;
pen.setColor(Qt::white);
p.setPen(pen);
p.setBrush(QBrush(QColor(255,255,255)));
p.drawText(im.rect(),QTime::currentTime().toString("hh:mm"));
for(int i=0;i<im.byteCount();i=i+4){
if(i%im.bytesPerLine()==0){
rowId++;
}
int offset=(i%im.bytesPerLine()+nextVideoBuf->getSettings()->getLogoPos().x()*bytesPerPix);
ptr[offset+m_videoFrame->GetRowBytes()*rowId]=im.bits()[i+3];
ptr[offset+1+m_videoFrame->GetRowBytes()*rowId]=im.bits()[i+2];
ptr[offset+2+m_videoFrame->GetRowBytes()*rowId]=im.bits()[i+1];
ptr[offset+3+m_videoFrame->GetRowBytes()*rowId]=im.bits()[i+0];
}
}
}
iDecklinkOutput->ScheduleVideoFrame(m_videoFrame,numberOfVideoFrames*currentOutput->getFrameDuration(),currentOutput->getFrameDuration(),currentOutput->getTimeScale());
numberOfVideoFrames++;
timeCode->update();
return true;
}

Here, in this callback function, I free the video bits and the timecode object:
HRESULT DLPlaybackDelegate::ScheduledFrameCompleted(IDeckLinkVideoFrame *completedFrame, BMDOutputFrameCompletionResult result){
IDeckLinkTimecode *t=NULL;
completedFrame->GetTimecode(bmdTimecodeVITC,&t);
if(t!=NULL){
MyTimeCode *mt=(MyTimeCode*)t;
VideoFile *f=NULL;
f=mt->getVideoFile();
if(f!=NULL){
f->frameEmited();
}
t->Release();
}
completedFrame->Release();
completedFrame=NULL;
static uint64_t frames=0;
m_device->ScheduleNextVideoFrame(false);
return S_OK;
}

// EDIT 12.12.2016
Hello, after I downloaded the source code from the ffmpeg site and compiled the latest version of ffmpeg, the memory leak went away. Now everything works fine. I also noticed in libsmbclient.c, an ffmpeg source file, that smbc_set_context(libsmbc->ctx); is not used in the functions that read from network shares. Without it, the program will crash if you try to read multiple files from multiple threads. Is this a bug?
Hello, after i download source code from ffmpeg site, and compile last version of a ffmpeg, i lost memory leak. Now everything works very fine.Also i notice in libsmbclient.c ,ffmpeg source file, that smbc_set_context(libsmbc->ctx) ; is not used in functions that reading from network shares. Without this, program will crash if you try to read multiple files from a multiple threads. Is this a bug ?