
Media (1)
-
Ogg detection bug
22 March 2013, by
Updated: April 2013
Language: French
Type: Video
Other articles (67)
-
MediaSPIP v0.2
21 June 2013, by
MediaSPIP 0.2 is the first stable version of MediaSPIP.
Its official release date is 21 June 2013, as announced here.
The zip file provided here contains only the MediaSPIP sources in standalone form.
As with the previous version, all of the software dependencies must be installed manually on the server.
If you wish to use this archive for a farm-mode installation, you will also need to make further changes (...)
-
MediaSPIP version 0.1 Beta
16 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
The zip file provided here contains only the MediaSPIP sources in standalone form.
For a working installation, all of the software dependencies must be installed manually on the server.
If you wish to use this archive for a farm-mode installation, you will also need to make further changes (...)
-
Contribute to translation
13 April 2011
You can help us improve the language used in the software interface to make MediaSPIP more accessible and user-friendly. You can also translate the interface into any language, which allows it to spread to new linguistic communities.
To do this, we use the SPIP translation interface, where all the language modules of MediaSPIP are available. Just subscribe to the mailing list and request further information on translation.
MediaSPIP is currently available in French and English (...)
On other sites (6252)
-
Can write frames to RTSP server, but can't display them in the ffplay or live555 client
11 October 2016, by tankyx
I am working on a zero-latency streaming server using the FFmpeg libraries, and I am facing a problem.
My server works when using nvenc: I can stream successfully to my client, which is another computer on the LAN. But if I switch the encoder to libx264 (in order to reduce latency), the server still writes the frames, yet the client runs into problems with the SDP header; more specifically, the media subsession does not seem to be initialized, so the client crashes. The odd thing is that when I dump the SDP header with nvenc and with libx264, it is actually the same in both cases.
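One thing worth checking (my own assumption, not something stated in the question) is whether the encoder is asked for global headers before avformat_write_header() is called; with libx264 the SPS/PPS otherwise stay in-band and the SDP produced by the RTSP muxer may not describe the stream usably. A minimal sketch, using the same variable names as the code below:
//Ask the encoder to export SPS/PPS in extradata when the muxer needs global headers (RTSP/SDP does).
if (outFormatCtx->oformat->flags & AVFMT_GLOBALHEADER)
pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;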
Here is the code I use to initialize my encoder:
/*
Init the codec that is used to encode the video.
Init the output format context (aka RTSP uri).
*/
FfmpegEncoder::FfmpegEncoder(char *url)
{
AVRational tmp_time_base;
AVDictionary* options = NULL;
this->pCodec = avcodec_find_encoder_by_name("libx264");
if (this->pCodec == NULL)
throw myExceptions("Error: Can't initialize the encoder. FfmpegEncoder.cpp l:9\n");
this->pCodecCtx = avcodec_alloc_context3(this->pCodec);
//Alloc output context
if (avformat_alloc_output_context2(&outFormatCtx, NULL, "rtsp", url) < 0)
throw myExceptions("Error: Can't alloc stream output. FfmpegEncoder.cpp l:17\n");
this->st = avformat_new_stream(this->outFormatCtx, this->pCodec);
if (this->st == NULL)
throw myExceptions("Error: Can't create stream . FfmpegEncoder.cpp l:22\n");
this->st->id = this->outFormatCtx->nb_streams - 1;
//Define the framerate of the output. The numerator should stay 1; the denominator is the framerate we are aiming for.
tmp_time_base.num = 1;
tmp_time_base.den = 60;
//TODO : parse these values
this->pCodecCtx->bit_rate = 5000000;
this->pCodecCtx->width = 1280;
this->pCodecCtx->height = 720;
//This sets the fps: 60 fps at this point.
this->pCodecCtx->time_base = tmp_time_base;
this->st->time_base = tmp_time_base;
//Add an intra frame every 10 frames
this->pCodecCtx->gop_size = 10;
this->pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
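//Note (added remark): "tune" and "preset" are libx264 private options; set on the generic
//context as below, av_opt_set() will not reach them unless AV_OPT_SEARCH_CHILDREN is passed
//as the search flag (or pCodecCtx->priv_data is targeted). "vprofile" looks like the old
//command-line name; the encoder option itself is "profile".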
av_opt_set(this->pCodecCtx, "tune", "zerolatency", 0);
av_opt_set(this->pCodecCtx, "vprofile", "main", 0);
av_opt_set(this->pCodecCtx, "preset", "faster", 0);
//Open Codec, using the context + x264 options
if (avcodec_open2(this->pCodecCtx, this->pCodec, &options) < 0)
throw myExceptions("Error: Can't open the codec. FfmpegEncoder.cpp l:43\n");
if (avcodec_copy_context(this->st->codec, this->pCodecCtx) != 0) {
throw myExceptions("Error : Can't copy codec context. FfmpegEncoder.cpp : l.46");
}
av_dump_format(this->outFormatCtx, 0, url, 1);
//write the header needed to start the stream.
if (avformat_write_header(this->outFormatCtx, NULL) != 0)
throw myExceptions("Error: failed to connect to RTSP server. FfmpegEncoder.cpp l:48\n");
}
-
Segfault while trying to fill the yuv image for rtsp streaming
21 September 2016, by tankyx
I am capturing the video stream from a window and I want to restream it to my RTSP proxy server. However, it seems I can't write the frame properly, even though I can show that same frame in an SDL window. Here is my code:
int StreamHandler::storeStreamData()
{
// Allocate video frame
pFrame = av_frame_alloc();
// Allocate an AVFrame structure
pFrameRGB = av_frame_alloc();
if (pFrameRGB == NULL)
throw myExceptions("Error : Can't alloc the frame.");
// Determine required buffer size and allocate buffer
numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
pCodecCtx->height);
buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
// Assign appropriate parts of buffer to image planes in pFrameRGB
avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height);
//InitSdlDrawBack();
// initialize SWS context for software scaling
sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
SWS_LANCZOS,
NULL,
NULL,
NULL
);
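//Note (added remark): the destination format passed above is pCodecCtx->pix_fmt, i.e. the
//decoder's own format, while the buffer attached to pFrameRGB was sized and filled for
//AV_PIX_FMT_YUV420P; if the two formats differ, the scaled planes will not match what the
//encoder later reads.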
SetPixelArray();
FfmpegEncoder enc("rtsp://127.0.0.1:1935/live/myStream");
i = 0;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
if (packet.stream_index == videoindex) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) {
i++;
//DrawFrame();
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
enc.encodeFrame(pFrameRGB, i);
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}
// Free the RGB image
av_free(buffer);
av_frame_free(&pFrameRGB);
// Free the YUV frame
av_frame_free(&pFrame);
// Close the codecs
avcodec_close(pCodecCtx);
avcodec_close(pCodecCtxOrig);
// Close the video file
avformat_close_input(&pFormatCtx);
return 0;
}
void StreamHandler::SetPixelArray()
{
yPlaneSz = pCodecCtx->width * pCodecCtx->height;
uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
yPlane = (Uint8*)malloc(yPlaneSz);
uPlane = (Uint8*)malloc(uvPlaneSz);
vPlane = (Uint8*)malloc(uvPlaneSz);
if (!yPlane || !uPlane || !vPlane)
throw myExceptions("Error : Can't create pixel array.");
uvPitch = pCodecCtx->width / 2;
}
Here I fill the YUV image and write the packet.
void FfmpegEncoder::encodeFrame(AVFrame * frame, int frameCount)
{
AVPacket pkt = { 0 };
int got_pkt;
av_init_packet(&pkt);
frame->pts = frameCount;
FillYuvImage(frame, frameCount, this->pCodecCtx->width, this->pCodecCtx->height);
if (avcodec_encode_video2(this->pCodecCtx, &pkt, frame, &got_pkt) < 0)
throw myExceptions("Error: failed to encode the frame. FfmpegEncoder.cpp l:61\n");
//If the frame was encoded successfully
if (got_pkt) {
pkt.stream_index = this->st->index;
pkt.pts = av_rescale_q_rnd(pkt.pts, this->pCodecCtx->time_base, this->st->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
if (av_write_frame(this->outFormatCtx, &pkt) < 0)
throw myExceptions("Error: failed to write video frame. FfmpegEncoder.cpp l:68\n");
}
}
void FfmpegEncoder::FillYuvImage(AVFrame * pict, int frame_index, int width, int height)
{
int x, y, i;
i = frame_index;
for (y = 0; y < height; y++)
{
for (x = 0; x < width / 2; x++)
pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
}
for (y = 0; y < height; y++)
{
for (x = 0; x < width / 2; x++)
{
pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
pict->data[2][y * pict->linesize[2] + x] = 64 + y + i * 5; //segfault here
}
}
}
The "FillYuvImage" method is copied from an FFmpeg example, but it does not work for me. If I don't call it, the "av_write_frame" call segfaults too.
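For comparison (paraphrasing the fill routine from FFmpeg's muxing example, not code taken from this question): the reference version writes the luma plane over the full width for height rows, and the chroma planes over only height/2 rows. In the version above, the chroma loops run over the full height, which walks past the end of the quarter-size U and V planes and matches the segfault location noted in the comment.
//Reference pattern, roughly as in FFmpeg's muxing example (hypothetical helper name).
void fill_yuv_image_ref(AVFrame *pict, int frame_index, int width, int height)
{
int x, y, i = frame_index;
//Y plane: full width, full height.
for (y = 0; y < height; y++)
for (x = 0; x < width; x++)
pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
//Cb and Cr planes: half width, half height for YUV420P.
for (y = 0; y < height / 2; y++)
{
for (x = 0; x < width / 2; x++)
{
pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
}
}
}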
EDIT : Here is my output context and codec initialization.
FfmpegEncoder::FfmpegEncoder(char *url)
{
AVRational tmp_time_base;
AVDictionary* options = NULL;
this->pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (this->pCodec == NULL)
throw myExceptions("Error: Can't initialize the encoder. FfmpegEncoder.cpp l:9\n");
this->pCodecCtx = avcodec_alloc_context3(this->pCodec);
//Alloc output context
if (avformat_alloc_output_context2(&outFormatCtx, NULL, "rtsp", url) < 0)
throw myExceptions("Error: Can't alloc stream output. FfmpegEncoder.cpp l:17\n");
this->st = avformat_new_stream(this->outFormatCtx, this->pCodec);
if (this->st == NULL)
throw myExceptions("Error: Can't create stream . FfmpegEncoder.cpp l:22\n");
av_dict_set(&options, "vprofile", "main", 0);
av_dict_set(&options, "tune", "zerolatency", 0);
tmp_time_base.num = 1;
tmp_time_base.den = 60;
//TODO : parse these values
this->pCodecCtx->bit_rate = 3000000;
this->pCodecCtx->width = 1280;
this->pCodecCtx->height = 720;
//This sets the fps: 60 fps at this point.
this->pCodecCtx->time_base = tmp_time_base;
//Add an intra frame every 12 frames
this->pCodecCtx->gop_size = 12;
this->pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
//Open Codec, using the context + x264 options
if (avcodec_open2(this->pCodecCtx, this->pCodec, &options) < 0)
throw myExceptions("Error: Can't open the codec. FfmpegEncoder.cpp l:43\n");
if (avcodec_copy_context(this->st->codec, this->pCodecCtx) != 0) {
throw myExceptions("Error : Can't copy codec context. FfmpegEncoder.cpp : l.46");
}
av_dump_format(this->outFormatCtx, 0, url, 1);
if (avformat_write_header(this->outFormatCtx, NULL) != 0)
throw myExceptions("Error: failed to connect to RTSP server. FfmpegEncoder.cpp l:48\n");
}
-
FFmpeg can write the header to the RTSP server but not the frame
27 September 2016, by tankyx
I am trying to capture the content of a window and stream it to an RTSP server (Wowza). I can write the header (avformat_write_header) and I see that the Wowza server receives a connection. But when it comes to writing the frame, av_write_frame just triggers a segfault. It is quite strange, as the address of the variable the method is trying to access looks normal (0x0000000033921D20).
Here is the code:
FfmpegEncoder::FfmpegEncoder(char *url)
{
AVRational tmp_time_base;
AVDictionary* options = NULL;
this->pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (this->pCodec == NULL)
throw myExceptions("Error: Can't initialize the encoder. FfmpegEncoder.cpp l:9\n");
this->pCodecCtx = avcodec_alloc_context3(this->pCodec);
//Alloc output context
if (avformat_alloc_output_context2(&outFormatCtx, NULL, "rtsp", url) < 0)
throw myExceptions("Error: Can't alloc stream output. FfmpegEncoder.cpp l:17\n");
this->st = avformat_new_stream(this->outFormatCtx, this->pCodec);
if (this->st == NULL)
throw myExceptions("Error: Can't create stream . FfmpegEncoder.cpp l:22\n");
this->st->id = this->outFormatCtx->nb_streams - 1;
av_dict_set(&options, "vprofile", "main", 0);
av_dict_set(&options, "tune", "zerolatency", 0);
//Define the framerate of the output. The numerator should stay 1; the denominator is the framerate we are aiming for.
tmp_time_base.num = 1;
tmp_time_base.den = 60;
//TODO : parse these values
this->pCodecCtx->bit_rate = 3000000;
this->pCodecCtx->width = 1280;
this->pCodecCtx->height = 720;
//This sets the fps: 60 fps at this point.
this->pCodecCtx->time_base = tmp_time_base;
//Add an intra frame every 12 frames
this->pCodecCtx->gop_size = 12;
this->pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
//Open Codec, using the context + x264 options
if (avcodec_open2(this->pCodecCtx, this->pCodec, &options) < 0)
throw myExceptions("Error: Can't open the codec. FfmpegEncoder.cpp l:43\n");
if (avcodec_copy_context(this->st->codec, this->pCodecCtx) != 0) {
throw myExceptions("Error : Can't copy codec context. FfmpegEncoder.cpp : l.46");
}
av_dump_format(this->outFormatCtx, 0, url, 1);
//write the header needed to start the stream.
if (avformat_write_header(this->outFormatCtx, NULL) != 0)
throw myExceptions("Error: failed to connect to RTSP server. FfmpegEncoder.cpp l:48\n");
}
/*Encode the frame in order to send it over rtsp.*/
void FfmpegEncoder::encodeFrame(AVFrame * frame, int frameCount)
{
AVPacket pkt = { 0 };
SwsContext *ctx;
int got_pkt;
frame->pts = frameCount;
FillYuvImage(frame, frameCount, this->pCodecCtx->width, this->pCodecCtx->height);
if (avcodec_send_frame(this->pCodecCtx, frame) != 0)
throw myExceptions("Error: failed to send video frame to codec. FfmpegEncoder.cpp l:89\n");
if (avcodec_receive_packet(this->pCodecCtx, &pkt) != 0)
throw myExceptions("Error: failed to receive video frame from codec. FfmpegEncoder.cpp l:91\n");
pkt.stream_index = this->st->index;
pkt.pts = av_rescale_q_rnd(pkt.pts, this->pCodecCtx->time_base, this->st->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
std::cout << "stream index = " << pkt.stream_index << " pts = " << pkt.pts << std::endl;
try {
av_write_frame(this->outFormatCtx, &pkt);
}
catch (myExceptions &e)
{
std::cout << e.what() << std::endl;
return;
}
}
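/* Added remark on the rescale above: assuming the RTSP muxer picked the usual
90 kHz RTP clock for st->time_base, a frame with pts = 2 in the 1/60 codec
time base is rescaled to 2 * (90000 / 60) = 3000. */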
/*Magic function from ffmpeg examples. It fills the frame to respect the YUV format.*/
void FfmpegEncoder::FillYuvImage(AVFrame * pict, int frame_index, int width, int height)
{
int x, y, i;
i = frame_index;
pict->format = AV_PIX_FMT_YUV420P;
pict->width = 1280;
pict->height = 720;
for (y = 0; y < height; y++)
{
for (x = 0; x < width; x++)
pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
}
for (y = 0; y < height / 2; y++)
{
for (x = 0; x < width / 2; x++)
{
pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
pict->data[2][y * pict->linesize[2] + x] = 64 + y + i * 5;
}
}
}
Here is the code where I retrieve the frame. I can show the frame successfully in an SDL window.
while (av_read_frame(pFormatCtx, &packet) >= 0) {
if (packet.stream_index == videoindex) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) {
i++;
DrawFrame();
enc.encodeFrame(pFrameRGB, i);
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}
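One thing these snippets do not show is how the AVFrame passed to encodeFrame (pFrameRGB above) gets its data planes. If they were never allocated for YUV420P at 1280x720, both FillYuvImage and the encoder read past valid memory, which would match the segfault. A minimal allocation sketch (my assumption about the missing setup, not code from the question):
AVFrame *frame = av_frame_alloc();
frame->format = AV_PIX_FMT_YUV420P;
frame->width = 1280;
frame->height = 720;
//Allocates data[0..2] and linesize[0..2] for the format and size above.
if (av_frame_get_buffer(frame, 32) < 0)
throw myExceptions("Error: can't allocate the frame buffers.\n");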