
Recherche avancée
Médias (3)
-
Exemple de boutons d’action pour une collection collaborative
27 février 2013, par
Mis à jour : Mars 2013
Langue : français
Type : Image
-
Exemple de boutons d’action pour une collection personnelle
27 février 2013, par
Mis à jour : Février 2013
Langue : English
Type : Image
-
Collections - Formulaire de création rapide
19 février 2013, par
Mis à jour : Février 2013
Langue : français
Type : Image
Autres articles (50)
-
Keeping control of your media in your hands
13 avril 2011, par — The vocabulary used on this site and around MediaSPIP in general, aims to avoid reference to Web 2.0 and the companies that profit from media-sharing.
While using MediaSPIP, you are invited to avoid using words like "Brand", "Cloud" and "Market".
MediaSPIP is designed to facilitate the sharing of creative media online, while allowing authors to retain complete control of their work.
MediaSPIP aims to be accessible to as many people as possible and development is based on expanding the (...) -
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par — Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond ;
-
Ecrire une actualité
21 juin 2013, par — Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
Vous pouvez personnaliser le formulaire de création d’une actualité.
Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...)
Sur d’autres sites (11042)
-
ffmpeg live stream recording stops on stream interrupt
3 septembre 2018, par Anton Antonov — I have to record a live stream with ffmpeg. It is easy. Here is my command line :
ffmpeg -i <source/address> -s 320x240 -r 25 -c:v libx264 -g 125 -vb 300000 -maxrate 400000 -bufsize 600000 -profile:v baseline -deinterlace -preset veryfast -ac 2 -ar 48000 -c:a libfdk_aac -b:a 64000 -f mp4 <to/dst/file>
But my task is to make ffmpeg to stop and exit when live stream is stopped.
Is there easy way to do this ?
-
FFMPEG libav gdigrab capturing with wrong colors
7 mars 2018, par user1496491 — I’m capturing the screen with the code below, and it gets me a picture with the wrong colors.
The picture on the left is the raw data, which I assumed is in ARGB ; the picture on the right is encoded as YUV. I’ve tried different formats ; the pictures change slightly, but the output never looks how it should. In what format does gdigrab give its output ? What’s the right way to encode it ?
#include "MainWindow.h"
#include <QGuiApplication>
#include <QLabel>
#include <QScreen>
#include <QTimer>
#include <QLayout>
#include <QImage>
#include <QtConcurrent>
#include <QThreadPool>
#include "ScreenCapture.h"
// Builds the preview UI, then sets up capture input, output file, and
// grabs/encodes a frame.
MainWindow::MainWindow(QWidget *parent) : QMainWindow(parent)
{
    resize(800, 600);

    // Preview area for the captured image.
    label = new QLabel();
    label->setAlignment(Qt::AlignVCenter | Qt::AlignHCenter);

    auto central = new QWidget();
    auto box = new QHBoxLayout();
    box->addWidget(label);
    central->setLayout(box);
    setCentralWidget(central);

    init();
    initOutFile();
    collectFrame();
}
// Tears down the capture input and waits for background work to finish.
MainWindow::~MainWindow()
{
    // avformat_close_input() both closes AND frees the context, then sets
    // the pointer to NULL. The original code also called
    // avformat_free_context() afterwards, which is redundant (and a
    // double free on older FFmpeg releases that did not NULL-check).
    avformat_close_input(&inputFormatContext);
    QThreadPool::globalInstance()->waitForDone();
}
// Opens the gdigrab screen-capture device and prepares a decoder for its
// video stream.
// NOTE(review): screens()[1] hard-codes the SECOND monitor and will crash
// on a single-monitor machine — confirm this is intentional.
void MainWindow::init()
{
    // Legacy (pre-FFmpeg-4.0) global registration calls.
    av_register_all();
    avcodec_register_all();
    avdevice_register_all();

    auto screen = QGuiApplication::screens()[1];
    QRect geometry = screen->geometry();

    inputFormatContext = avformat_alloc_context();

    // Options handed to the gdigrab input device. The last argument of
    // av_dict_set() is a flags bitmask (NULL == 0 == no flags).
    // NOTE(review): "preset" and "pix_fmt" are encoder-side options, not
    // gdigrab options — presumably ignored by the device; verify against
    // the gdigrab option list if the intent was to control capture format.
    AVDictionary* options = NULL;
    av_dict_set(&options, "framerate", "30", NULL);
    av_dict_set(&options, "offset_x", QString::number(geometry.x()).toLatin1().data(), NULL);
    av_dict_set(&options, "offset_y", QString::number(geometry.y()).toLatin1().data(), NULL);
    av_dict_set(&options, "preset", "ultrafast", NULL);
    av_dict_set(&options, "probesize", "10MB", NULL);
    av_dict_set(&options, "pix_fmt", "yuv420p", NULL);
    av_dict_set(&options, "video_size", QString(QString::number(geometry.width()) + "x" + QString::number(geometry.height())).toLatin1().data(), NULL);

    AVInputFormat* inputFormat = av_find_input_format("gdigrab");
    avformat_open_input(&inputFormatContext, "desktop", inputFormat, &options);

    // Alternative capture path via the "screen-capture-recorder" DirectShow
    // filter, kept for experimentation.
    // AVDictionary* options = NULL;
    // av_dict_set(&options, "framerate", "30", NULL);
    // av_dict_set(&options, "preset", "ultrafast", NULL);
    // av_dict_set(&options, "vcodec", "h264", NULL);
    // av_dict_set(&options, "s", "1280x720", NULL);
    // av_dict_set(&options, "crf", "0", NULL);
    // av_dict_set(&options, "rtbufsize", "100M", NULL);
    // AVInputFormat *format = av_find_input_format("dshow");
    // avformat_open_input(&inputFormatContext, "video=screen-capture-recorder", format, &options);

    // Entries consumed by the device were removed from `options`; free the rest.
    av_dict_free(&options);

    avformat_find_stream_info(inputFormatContext, NULL);
    videoStreamIndex = av_find_best_stream(inputFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

    // Build a decoder context from the stream's codec parameters
    // (modern replacement for the deprecated stream->codec field).
    inputCodec = avcodec_find_decoder(inputFormatContext->streams[videoStreamIndex]->codecpar->codec_id);
    if(!inputCodec) qDebug() << "Не найден кодек входящего потока!";
    inputCodecContext = avcodec_alloc_context3(inputCodec);
    inputCodecContext->codec_id = inputCodec->id;
    avcodec_parameters_to_context(inputCodecContext, inputFormatContext->streams[videoStreamIndex]->codecpar);
    if(avcodec_open2(inputCodecContext, inputCodec, NULL)) qDebug() << "Не удалось открыть входной кодек!";
}
void MainWindow::initOutFile()
{
const char* filename = "C:/Temp/output.mp4";
if(avformat_alloc_output_context2(&outFormatContext, NULL, NULL, filename) < 0) qDebug() << "Не удалось создать выходной контекст!";
outCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
if(!outCodec) qDebug() << "Не удалось найти кодек!";
videoStream = avformat_new_stream(outFormatContext, outCodec);
videoStream->time_base = {1, 30};
const AVPixelFormat* pixelFormat = outCodec->pix_fmts;
while (*pixelFormat != AV_PIX_FMT_NONE)
{
qDebug() << "OUT_FORMAT" << av_get_pix_fmt_name(*pixelFormat);
++pixelFormat;
}
outCodecContext = videoStream->codec;
outCodecContext->bit_rate = 400000;
outCodecContext->width = inputCodecContext->width;
outCodecContext->height = inputCodecContext->height;
outCodecContext->time_base = videoStream->time_base;
outCodecContext->gop_size = 10;
outCodecContext->max_b_frames = 1;
outCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
if (outFormatContext->oformat->flags & AVFMT_GLOBALHEADER) outCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
if(avcodec_open2(outCodecContext, outCodec, NULL)) qDebug() << "Не удалось открыть выходной кодек!";
swsContext = sws_getContext(inputCodecContext->width,
inputCodecContext->height,
// inputCodecContext->pix_fmt,
AV_PIX_FMT_ABGR,
outCodecContext->width,
outCodecContext->height,
outCodecContext->pix_fmt,
SWS_BICUBIC, NULL, NULL, NULL);
if(avio_open(&outFormatContext->pb, filename, AVIO_FLAG_WRITE) < 0) qDebug() << "Не удалось открыть файл!";
if(avformat_write_header(outFormatContext, NULL) < 0) qDebug() << "Не удалось записать заголовок!";
}
void MainWindow::collectFrame()
{
AVFrame* inFrame = av_frame_alloc();
inFrame->format = inputCodecContext->pix_fmt;
inFrame->width = inputCodecContext->width;
inFrame->height = inputCodecContext->height;
int size = av_image_alloc(inFrame->data, inFrame->linesize, inFrame->width, inFrame->height, inputCodecContext->pix_fmt, 1);
qDebug() << size;
AVFrame* outFrame = av_frame_alloc();
outFrame->format = outCodecContext->pix_fmt;
outFrame->width = outCodecContext->width;
outFrame->height = outCodecContext->height;
qDebug() << av_image_alloc(outFrame->data, outFrame->linesize, outFrame->width, outFrame->height, outCodecContext->pix_fmt, 1);
AVPacket packet;
av_init_packet(&packet);
av_read_frame(inputFormatContext, &packet);
// while(av_read_frame(inputFormatContext, &packet) >= 0)
// {
if(packet.stream_index == videoStream->index)
{
memcpy(inFrame->data[0], packet.data, size);
sws_scale(swsContext, inFrame->data, inFrame->linesize, 0, inputCodecContext->height, outFrame->data, outFrame->linesize);
QImage image(inFrame->data[0], inFrame->width, inFrame->height, QImage::Format_ARGB32);
label->setPixmap(QPixmap::fromImage(image).scaled(label->size(), Qt::KeepAspectRatio));
AVPacket outPacket;
av_init_packet(&outPacket);
int encodeResult = avcodec_receive_packet(outCodecContext, &outPacket);
while(encodeResult == AVERROR(EAGAIN))
{
if(avcodec_send_frame(outCodecContext, outFrame)) qDebug() << "Ошибка отправки фрейма на кодирование!";
encodeResult = avcodec_receive_packet(outCodecContext, &outPacket);
}
if(encodeResult != 0) qDebug() << "Ошибка во время кодирования!" << encodeResult;
if(outPacket.pts != AV_NOPTS_VALUE) outPacket.pts = av_rescale_q(outPacket.pts, videoStream->codec->time_base, videoStream->time_base);
if(outPacket.dts != AV_NOPTS_VALUE) outPacket.dts = av_rescale_q(outPacket.dts, videoStream->codec->time_base, videoStream->time_base);
av_write_frame(outFormatContext, &outPacket);
av_packet_unref(&outPacket);
}
// }
av_packet_unref(&packet);
av_write_trailer(outFormatContext);
avio_close(outFormatContext->pb);
}
-
swscaler bad src image pointers
7 mars 2018, par user1496491 — I’m completely lost. I’m trying to capture 30 screenshots and put them into a video with
FFMPEG
under Windows 10. And it keeps telling me that [swscaler @ 073890a0] bad src image pointers
. As a result the video is entirely green. If I change the format to dshow
using video=screen-capture-recorder
the video looks to be mostly garbage. Here’s my short code for that. I’m completely stuck and don’t even know in which direction to look. MainWindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <QFuture>
#include <QFutureWatcher>
#include <QMutex>
#include <QMutexLocker>
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavcodec/avfft.h"
#include "libavdevice/avdevice.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavformat/avformat.h"
#include "libavformat/avio.h"
#include "libavutil/opt.h"
#include "libavutil/common.h"
#include "libavutil/channel_layout.h"
#include "libavutil/imgutils.h"
#include "libavutil/mathematics.h"
#include "libavutil/samplefmt.h"
#include "libavutil/time.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/file.h"
#include "libswscale/swscale.h"
}
// Demo window: captures the screen through FFmpeg's gdigrab device,
// re-encodes the frames to MPEG-4 and writes them to an output file.
class MainWindow : public QMainWindow
{
    Q_OBJECT
public:
    MainWindow(QWidget *parent = 0);
    ~MainWindow();
private:
    AVFormatContext *inputFormatContext = nullptr;  // gdigrab capture input
    AVFormatContext *outFormatContext = nullptr;    // MP4 muxer output
    AVStream* videoStream = nullptr;                // video stream inside the output file
    AVDictionary* options = nullptr;                // options passed to the capture device
    AVCodec* outCodec = nullptr;                    // MPEG-4 encoder
    AVCodec* inputCodec = nullptr;                  // decoder for the captured stream
    AVCodecContext* inputCodecContext = nullptr;    // decoder context
    AVCodecContext* outCodecContext = nullptr;      // encoder context
    SwsContext* swsContext = nullptr;               // capture-format -> YUV420P converter
private:
    void init();          // open the capture device and its decoder
    void initOutFile();   // create the muxer/encoder/scaler chain
    void collectFrame();  // capture loop: decode, convert, encode, write
};
#endif // MAINWINDOW_H
MainWindow.cpp
#include "MainWindow.h"
#include <QGuiApplication>
#include <QLabel>
#include <QScreen>
#include <QTimer>
#include <QLayout>
#include <QImage>
#include <QtConcurrent>
#include <QThreadPool>
#include "ScreenCapture.h"
// Builds the preview UI, then wires up the capture input, the output file,
// and runs the capture loop.
MainWindow::MainWindow(QWidget *parent) : QMainWindow(parent)
{
    resize(800, 600);

    // Preview area (local only — never read back in this listing).
    auto label = new QLabel();
    label->setAlignment(Qt::AlignVCenter | Qt::AlignHCenter);

    auto central = new QWidget();
    auto box = new QHBoxLayout();
    box->addWidget(label);
    central->setLayout(box);
    setCentralWidget(central);

    init();
    initOutFile();
    collectFrame();
}
// Tears down the capture input and waits for background work to finish.
MainWindow::~MainWindow()
{
    // avformat_close_input() both closes AND frees the context, then sets
    // the pointer to NULL. The original code also called
    // avformat_free_context() afterwards, which is redundant (and a
    // double free on older FFmpeg releases that did not NULL-check).
    avformat_close_input(&inputFormatContext);
    QThreadPool::globalInstance()->waitForDone();
}
// Opens the gdigrab screen-capture device for the primary monitor and
// prepares a decoder for its stream.
void MainWindow::init()
{
    // Legacy (pre-FFmpeg-4.0) global registration calls.
    av_register_all();
    avcodec_register_all();
    avdevice_register_all();
    avformat_network_init();

    auto screen = QGuiApplication::screens()[0];
    QRect geometry = screen->geometry();

    inputFormatContext = avformat_alloc_context();

    // Capture-device options; the last av_dict_set() argument is a flags
    // bitmask (NULL == 0 == no flags).
    options = NULL;
    av_dict_set(&options, "framerate", "30", NULL);
    av_dict_set(&options, "offset_x", QString::number(geometry.x()).toLatin1().data(), NULL);
    av_dict_set(&options, "offset_y", QString::number(geometry.y()).toLatin1().data(), NULL);
    av_dict_set(&options, "video_size", QString(QString::number(geometry.width()) + "x" + QString::number(geometry.height())).toLatin1().data(), NULL);
    av_dict_set(&options, "show_region", "1", NULL);

    AVInputFormat* inputFormat = av_find_input_format("gdigrab");
    avformat_open_input(&inputFormatContext, "desktop", inputFormat, &options);

    int videoStreamIndex = av_find_best_stream(inputFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

    // Uses the deprecated stream->codec context directly.
    inputCodecContext = inputFormatContext->streams[videoStreamIndex]->codec;
    inputCodecContext->width = geometry.width();
    inputCodecContext->height = geometry.height();
    // NOTE(review): this FORCES the decoder context to YUV420P regardless
    // of what gdigrab actually delivers. If the device produces a packed
    // RGB-family format, the decoder output/linesizes will not match this
    // setting — a plausible cause of the "bad src image pointers" error
    // later in sws_scale. Confirm against the real stream format instead
    // of overriding it.
    inputCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;

    inputCodec = avcodec_find_decoder(inputCodecContext->codec_id);
    avcodec_open2(inputCodecContext, inputCodec, NULL);
}
void MainWindow::initOutFile()
{
const char* filename = "C:/Temp/output.mp4";
avformat_alloc_output_context2(&outFormatContext, NULL, NULL, filename);
outCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
videoStream = avformat_new_stream(outFormatContext, outCodec);
videoStream->time_base = {1, 30};
outCodecContext = videoStream->codec;
outCodecContext->codec_id = AV_CODEC_ID_MPEG4;
outCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
outCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
outCodecContext->bit_rate = 400000;
outCodecContext->width = inputCodecContext->width;
outCodecContext->height = inputCodecContext->height;
outCodecContext->gop_size = 3;
outCodecContext->max_b_frames = 2;
outCodecContext->time_base = videoStream->time_base;
if (outFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
outCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
avcodec_open2(outCodecContext, outCodec, NULL);
if (!(outFormatContext->flags & AVFMT_NOFILE))
avio_open2(&outFormatContext->pb, filename, AVIO_FLAG_WRITE, NULL, NULL);
swsContext = sws_getContext(inputCodecContext->width,
inputCodecContext->height,
inputCodecContext->pix_fmt,
outCodecContext->width,
outCodecContext->height,
outCodecContext->pix_fmt,
SWS_BICUBIC, NULL, NULL, NULL);
avformat_write_header(outFormatContext, &options);
}
// Capture loop: reads packets from gdigrab, decodes them, converts to
// YUV420P, encodes 30 frames as MPEG-4 and writes them to the output file.
void MainWindow::collectFrame()
{
    // Destination frame for the decoder.
    AVFrame* frame = av_frame_alloc();
    frame->data[0] = NULL;
    frame->width = inputCodecContext->width;
    frame->height = inputCodecContext->height;
    frame->format = inputCodecContext->pix_fmt;
    // NOTE(review): avcodec_decode_video2() below installs its own buffers
    // into `frame`, so this av_image_alloc() buffer is overwritten and
    // leaked — and forcing pix_fmt in init() means the decoder's real
    // output may not match this layout (likely the "bad src image
    // pointers" error from sws_scale). Verify before relying on it.
    av_image_alloc(frame->data, frame->linesize, inputCodecContext->width, inputCodecContext->height, (AVPixelFormat)frame->format, 32);

    // Destination frame for the scaler/encoder.
    AVFrame* outFrame = av_frame_alloc();
    outFrame->data[0] = NULL;
    outFrame->width = outCodecContext->width;
    outFrame->height = outCodecContext->height;
    outFrame->format = outCodecContext->pix_fmt;
    av_image_alloc(outFrame->data, outFrame->linesize, outCodecContext->width, outCodecContext->height, (AVPixelFormat)outFrame->format, 32);

    // NOTE(review): `24` is passed where av_image_get_buffer_size() expects
    // a linesize ALIGNMENT (normally 1, 16 or 32) — confirm intent.
    int bufferSize = av_image_get_buffer_size(outCodecContext->pix_fmt,
                                              outCodecContext->width,
                                              outCodecContext->height,
                                              24);
    uint8_t* outBuffer = (uint8_t*)av_malloc(bufferSize);
    // NOTE(review): this re-points outFrame->data at `outBuffer`, leaking
    // the buffer from the av_image_alloc() call just above.
    avpicture_fill((AVPicture*)outFrame, outBuffer,
                   AV_PIX_FMT_YUV420P,
                   outCodecContext->width, outCodecContext->height);

    int frameCount = 30;  // stop after this many decoded frames
    int count = 0;
    AVPacket* packet = (AVPacket*)av_malloc(sizeof(AVPacket));
    av_init_packet(packet);
    // NOTE(review): `packet` is never av_packet_unref()'d inside the loop,
    // so each iteration leaks the packet's buffer.
    while(av_read_frame(inputFormatContext, packet) >= 0)
    {
        if(packet->stream_index == videoStream->index)
        {
            int frameFinished = 0;
            // Deprecated decode API; frameFinished != 0 once a full frame is out.
            avcodec_decode_video2(inputCodecContext, frame, &frameFinished, packet);
            if(frameFinished)
            {
                if(++count > frameCount)
                {
                    qDebug() << "FINISHED!";
                    break;
                }
                // Convert decoded frame to the encoder's pixel format.
                sws_scale(swsContext, frame->data, frame->linesize, 0, inputCodecContext->height, outFrame->data, outFrame->linesize);
                AVPacket outPacket;
                av_init_packet(&outPacket);
                outPacket.data = NULL;
                outPacket.size = 0;
                int got_picture = 0;
                // Deprecated encode API; got_picture != 0 when a packet was produced.
                avcodec_encode_video2(outCodecContext, &outPacket, outFrame, &got_picture);
                if(got_picture)
                {
                    // Rescale timestamps from encoder to stream time base.
                    if(outPacket.pts != AV_NOPTS_VALUE) outPacket.pts = av_rescale_q(outPacket.pts, videoStream->codec->time_base, videoStream->time_base);
                    if(outPacket.dts != AV_NOPTS_VALUE) outPacket.dts = av_rescale_q(outPacket.dts, videoStream->codec->time_base, videoStream->time_base);
                    av_write_frame(outFormatContext , &outPacket);
                }
                av_packet_unref(&outPacket);
            }
        }
    }
    // NOTE(review): the encoder is never flushed and avio is never closed,
    // so delayed packets may be lost and the file may stay incomplete.
    av_write_trailer(outFormatContext);
    av_free(outBuffer);
}
</qthreadpool></qimage></qlayout></qtimer></qscreen></qlabel></qguiapplication>