
Media (2)
-
Valkaama DVD Label
4 October 2011, by
Updated: February 2013
Language: English
Type: Image
-
Podcasting Legal guide
16 May 2011, by
Updated: May 2011
Language: English
Type: Text
Other articles (13)
-
Installation en mode ferme
4 February 2011, by
Farm mode makes it possible to host several MediaSPIP sites while installing its functional core only once.
This is the method we use on this very platform.
Using farm mode requires some familiarity with how SPIP works, unlike the standalone version, which does not really require any specific knowledge since the usual SPIP private area is no longer used.
To begin with, you must have installed the same files as the installation (...) -
Supporting all media types
13 April 2011, by
Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)
-
Configurable image and logo sizes
9 February 2011, by
In many places on the site, logos and images are resized to fit the slots defined by the themes. Since all of these sizes can change from one theme to another, they can be defined directly in the theme, which saves the user from having to configure them manually after changing the site's appearance.
These image sizes are also available in the specific configuration of MediaSPIP Core. The maximum size of the site logo in pixels allows (...)
On other sites (4528)
-
A Comprehensive Guide to Robust Digital Marketing Analytics
15 November 2023, by Erin (Analytics Tips) -
What is Audience Segmentation? The 5 Main Types & Examples
16 November 2023, by Erin (Analytics Tips) -
Getting and decoding video by RTP H264
29 November 2023, by AlekseiKraev
Parsing RTP H264 with the Win32 API in C. A C++ std::queue is used for the NAL units, I admit.


The RTP stream is generated with FFmpeg using the following command:
ffmpeg.exe -f gdigrab -framerate 25 -i desktop -s 853x480 -b:v 120000 -c:v libx264 -f rtp rtp://127.0.0.1:8080
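As a side note (not part of the original question), one way to verify the stream independently of the receiver code below is to describe it in a small SDP file and open that with ffplay. The address, port and payload type are assumed to match the command above (FFmpeg also prints the exact SDP for the stream when it starts); the file name stream.sdp is only illustrative:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=Desktop capture
c=IN IP4 127.0.0.1
t=0 0
m=video 8080 RTP/AVP 96
a=rtpmap:96 H264/90000

ffplay -protocol_whitelist file,udp,rtp -i stream.sdp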


The code below parses the incoming RTP/H.264 stream and converts it to an RGB matrix.
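For orientation, the parser distinguishes the RFC 6184 packetization modes by the low five bits of the first payload byte (the NAL unit type): values 1 to 23 mark a single NAL unit packet, 24 is STAP-A (several NAL units aggregated in one packet) and 28 is FU-A (one NAL unit fragmented across several packets). A minimal sketch of that dispatch (illustrative names only, not the code below):

 unsigned char payload_header = payload[0];
 int nal_type = payload_header & 0x1f; /* low 5 bits: NAL unit / packetization type */
 if (nal_type >= 1 && nal_type <= 23) { /* single NAL unit: prefix start code, decode */ }
 else if (nal_type == 24) { /* STAP-A: walk the 2-byte size-prefixed NAL units */ }
 else if (nal_type == 28) { /* FU-A: reassemble fragments between the S and E bits */ }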


video_H264_decode.h


#pragma once
#ifndef _VIDEO_H264_DECODE_SEND_H // Guard against multiple inclusion of this module
#define _VIDEO_H264_DECODE_SEND_H
//******************************************************************************
// Section include
//******************************************************************************
#include "main.h"
#include <windows.h> // assumption: the original include was stripped by the page formatting; the Windows headers are needed for UCHAR
//******************************************************************************
// Constants
//******************************************************************************

//******************************************************************************
// Type
//******************************************************************************
typedef struct {
 unsigned char* data;
 int size;
}RTPData_DType;

typedef struct 
{
 union
 {
 struct
 {
 char V:2; //Version
 char P:1; //Padding
 char X:1; //Extension
 char CC:4; //CSRC count

 char M:1; //Marker (flag of the last packet of an access unit)
 char PT:7; //Payload type (RTP payload media type, H264 = 96)

 short sequence_number; //Sequence number, incremented by 1 for each RTP packet sent.
 int time_stamp; //Timestamp of the media sample.
 int SSRC; //Synchronization source: packets with the same SSRC share the same origin.
 };
 unsigned char data[12];
 };
}RTPHeader_DType;
//******************************************************************************
// Global var
//******************************************************************************

//******************************************************************************
// Local function prototype
//******************************************************************************
UCHAR rtp_H264_recive_init(void);
UCHAR RTPStop(void);
//******************************************************************************
// Macros
//******************************************************************************
#define BYTE2_SWAP(X) ((((short)(X) & 0xff00) >> 8) |(((short)(X) & 0x00ff) << 8))
#endif
//******************************************************************************
// END OF FILE
//******************************************************************************



video_H264_decode.c


//******************************************************************************
//include
//******************************************************************************
#include "main.h"
/*#include "video_H264_decode.h"
#include 
#include 
#include <chrono>
#include 

#pragma comment(lib, "ws2_32.lib")
#include */
#include <iostream>
#include <queue>

extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libpostproc/postprocess.h"
#include "libavcodec/avcodec.h"
}

#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avdevice.lib")
#pragma comment(lib,"avfilter.lib")
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"postproc.lib")
#pragma comment(lib,"swresample.lib")
#pragma comment(lib,"swscale.lib")

#pragma warning(disable: 4996)
//******************************************************************************
// Section for determining the variables used in the module
//******************************************************************************
//------------------------------------------------------------------------------
// Global
//------------------------------------------------------------------------------

//------------------------------------------------------------------------------
// Local
//------------------------------------------------------------------------------
const int inteval = 0x01000000; // Annex B start code: stored as bytes 00 00 00 01 in little-endian memory
BOOL FlagRTPActive = TRUE;

HANDLE hMutexRTPRecive;
HANDLE hSemaphoreRTP;
HANDLE hTreadRTPRecive;
HANDLE hTreadRTPDecode;


SOCKET RTPSocket; // UDP socket for the RTP stream
RTPData_DType packet;
RTPData_DType FU_buffer = { 0 };

std::queue<RTPData_DType> q; // queue of assembled NAL units (element type restored from usage; it was stripped by the page formatting)
std::set<int> seq;

AVFormatContext* pAVFormatContext;
AVCodecContext* pAVCodecContext;
const AVCodec* pAVCodec;
AVFrame* pAVFrame;
AVFrame* AVFrameRGG;
SwsContext* pSwsContext;
AVPacket *pAVPacket;
AVCodecParserContext* pAVCodecParserContext;

UINT port;
//******************************************************************************
// Section of prototypes of local functions
//******************************************************************************
DWORD WINAPI rtp_H264_recive_Procedure(CONST LPVOID lpParam);
DWORD WINAPI rtp_decode_Procedure(CONST LPVOID lpParam);
char RTPSocketInit(void);
void RTPPacketParser(void);
char RTPSocketRecive(void);
void Decode_NaluToAVFrameRGG();
//******************************************************************************
// Section of the description of functions
//******************************************************************************
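// Creates the synchronization objects (a semaphore signalled by the receive
// thread, a mutex protecting the NALU queue) and starts the two worker
// threads: one that receives and parses RTP packets, one that decodes the
// queued NAL units.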
UCHAR rtp_H264_recive_init(void)
{
 hSemaphoreRTP = CreateSemaphore(
 NULL, // default security attributes
 0, // initial count
 1, // maximum count
 NULL); // unnamed semaphore

 hMutexRTPRecive = CreateMutex(
 NULL, // default security attributes
 FALSE, // initially not owned
 NULL); // unnamed mutex

 hTreadRTPRecive = CreateThread(NULL, NULL, rtp_H264_recive_Procedure, NULL, NULL, NULL);
 hTreadRTPDecode = CreateThread(NULL, NULL, rtp_decode_Procedure, NULL, NULL, NULL);

 return 0;
}
//------------------------------------------------------------------------------
UCHAR RTPStop(void)
{
 FlagRTPActive = FALSE;

 if (hSemaphoreRTP) CloseHandle(hSemaphoreRTP);
 if (hMutexRTPRecive) CloseHandle(hMutexRTPRecive);
 if (hTreadRTPRecive) CloseHandle(hTreadRTPRecive);

 closesocket(RTPSocket); 

 return 0;
}
//------------------------------------------------------------------------------
DWORD WINAPI rtp_H264_recive_Procedure(CONST LPVOID lpParam)
{
 while (RTPSocketInit() == 0)
 Sleep(2000);
 
 while (1)
 {
 RTPSocketRecive();
 RTPPacketParser();
 ReleaseSemaphore(hSemaphoreRTP, 1, NULL);
 }
}
//------------------------------------------------------------------------------
DWORD WINAPI rtp_decode_Procedure(CONST LPVOID lpParam)
{
 port = param.Option.VideoPort;

 pAVPacket = av_packet_alloc();
 if (!pAVPacket)
 {
 MessageBox(NULL, L"ERROR Could not allocate pAVPacket", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }
 av_init_packet(pAVPacket);

 /* find the MPEG-1 video decoder */
 pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
 if (!pAVCodec)
 {
 MessageBox(NULL, L"ERROR Codec not found", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVCodecParserContext = av_parser_init(pAVCodec->id);
 if (!pAVCodecParserContext)
 {
 MessageBox(NULL, L"ERROR Parser not found", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVCodecContext = avcodec_alloc_context3(pAVCodec);
 if (!pAVCodecContext) 
 {
 MessageBox(NULL, L"ERROR Could not allocate video codec context", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }
 
 if (avcodec_open2(pAVCodecContext, pAVCodec, NULL) < 0)
 {
 MessageBox(NULL, L"ERROR Could not open codec", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVFrame = av_frame_alloc();
 if (!pAVFrame) 
 {
 MessageBox(NULL, L"ERROR Could not allocate video frame", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 } 
 
 while (FlagRTPActive)
 {
 if(port != param.Option.VideoPort)
 closesocket(RTPSocket);

 WaitForSingleObject(hSemaphoreRTP, 500); 
 Decode_NaluToAVFrameRGG();
 }

 avformat_free_context(pAVFormatContext);
 av_frame_free(&pAVFrame);
 avcodec_close(pAVCodecContext);
 av_packet_free(&pAVPacket);

 
 if (hTreadRTPDecode) CloseHandle(hTreadRTPDecode);

 return 0;
}

//------------------------------------------------------------------------------
char RTPSocketInit(void)
{ 
 sockaddr_in RTPSocketAddr;
 
 RTPSocketAddr.sin_family = AF_INET;
 RTPSocketAddr.sin_addr.s_addr = htonl(INADDR_ANY);
 RTPSocketAddr.sin_port = htons(param.Option.VideoPort);

 RTPSocket = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
 if (RTPSocket == INVALID_SOCKET)
 {
 MessageBox(NULL, L"ERROR Invalid RTP Socket", L"UDP Socket", MB_OK | MB_ICONERROR);
 return 0;
 }

 int option = 12000;
 if (setsockopt(RTPSocket, SOL_SOCKET, SO_RCVBUF, (char*)&option, sizeof(option)) < 0)
 {
 printf("setsockopt failed\n");

 }

 /*option = TRUE;
 if (setsockopt(RTPSocket, SOL_SOCKET, SO_CONDITIONAL_ACCEPT, (char*)&option, sizeof(option)) < 0)
 {
 printf("setsockopt failed\n");

 }*/

 if (bind(RTPSocket, (sockaddr*)&RTPSocketAddr, sizeof(RTPSocketAddr)) == SOCKET_ERROR)
 {
 MessageBox(NULL, L"ERROR bind", L"UDP Socket", MB_OK | MB_ICONERROR);
 closesocket(RTPSocket);
 return 0;
 }
 return 1;
}

//------------------------------------------------------------------------------
char RTPSocketRecive(void)
{
 static char RecvBuf[2000];
 static int BufLen = 2000;

 int iResult = 0;
 struct sockaddr_in SenderAddr;
 int SenderAddrSize = sizeof(SenderAddr);
 TCHAR szErrorMsg[100];

 iResult = recvfrom(RTPSocket, RecvBuf, BufLen, 0, NULL, NULL);
 if (iResult == SOCKET_ERROR && FlagRTPActive == TRUE)
 {
 StringCchPrintf(szErrorMsg, 100, L"ERROR recvfrom, error code: %d", WSAGetLastError());
 MessageBox(NULL, szErrorMsg, L"UDP Socket", MB_OK | MB_ICONERROR);
 return -1;
 }

 packet.data = (unsigned char*)RecvBuf;
 packet.size = iResult;

 return 1;
}

//------------------------------------------------------------------------------
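// Parses one received RTP packet: reads the 12-byte RTP header, skips an
// optional header extension, then dispatches on the NAL unit type of the
// payload. Single NAL units and STAP-A aggregates (type 24) are copied out
// directly; FU-A fragments (type 28) are accumulated in FU_buffer until the
// end bit is set. Every complete NAL unit is prefixed with the 00 00 00 01
// Annex B start code and pushed onto the queue for the decoder thread.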
void RTPPacketParser(void)
{
 RTPHeader_DType RTPHeader;
 RTPData_DType NALU;
 unsigned char* buffer = packet.data;
 int pos = 0;
 static int count = 0;
 static short lastSequenceNumber = 0xFFFF;
 short type;
 char payload_header;

 //read rtp header
 memcpy(&RTPHeader, buffer, sizeof(RTPHeader));
 RTPHeader.sequence_number = BYTE2_SWAP(RTPHeader.sequence_number);
 pos += 12; 
 
 if (RTPHeader.X) {
 // RTP header extension present: skip over it
 short define;
 short length;
 length = buffer[pos + 3]; // assume the extension length fits in one byte
 pos += 4;
 pos += (length * 4);
 }

 payload_header = buffer[pos];
 type = payload_header & 0x1f; // NAL unit type from the RTP payload header
 pos++;
 
 //STAP-A
 if (type == 24)
 { 
 while (pos < packet.size)
 {
 unsigned short NALU_size;
 memcpy(&NALU_size, buffer + pos, 2);
 NALU_size = BYTE2_SWAP(NALU_size);
 pos += 2;
 char NAL_header = buffer[pos];
 short NAL_type = NAL_header & 0x1f;

 if (NAL_type == 7) 
 {
 count++;
 //SPS
 }
 else if (NAL_type == 8) 
 {
 //PPS
 }
 else if (NAL_type == 10) 
 {
 //end of sequence
 }

 if (count > 0) 
 {
 NALU.data = (unsigned char*) malloc(NALU_size + 4);
 NALU.size = NALU_size + 4;
 memcpy(NALU.data, &inteval, 4);
 memcpy(NALU.data + 4, &buffer[pos], NALU_size);

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);
 }

 pos += NALU_size;
 }
 }
 //FU-A Fragmentation unit
 else if (type == 28)
 { 
 //FU header
 char FU_header = buffer[pos];
 bool fStart = FU_header & 0x80;
 bool fEnd = FU_header & 0x40;

 //NAL header
 char NAL_header = (payload_header & 0xe0) | (FU_header & 0x1f);
 short NAL_type = FU_header & 0x1f;
 if (NAL_type == 7) 
 {
 count++;
 //SPS
 }
 else if (NAL_type == 8) 
 {
 //PPS
 }
 else if (NAL_type == 10) 
 {
 //end of sequence
 }
 pos++;

 int size = packet.size - pos;
 
 if (count > 0)
 {
 if (fStart) 
 {
 if (FU_buffer.size != 0)
 {
 free(FU_buffer.data);
 FU_buffer.size = 0;
 }
 FU_buffer.data = (unsigned char*)malloc(size + 5);
 if (FU_buffer.data == NULL)
 return;
 FU_buffer.size = size + 5;
 memcpy(FU_buffer.data, &inteval, 4);
 memcpy(FU_buffer.data + 4, &NAL_header, 1);
 memcpy(FU_buffer.data + 5, buffer + pos, size);
 }
 else
 {
 unsigned char* temp = (unsigned char*)malloc(FU_buffer.size + size);
 memcpy(temp, FU_buffer.data, FU_buffer.size);
 memcpy(temp + FU_buffer.size, buffer + pos, size);
 if (FU_buffer.size != 0) free(FU_buffer.data);
 FU_buffer.data = temp;
 FU_buffer.size += size;
 }

 if (fEnd)
 {
 NALU.data = (unsigned char*)malloc(FU_buffer.size);
 NALU.size = FU_buffer.size;
 memcpy(NALU.data, FU_buffer.data, FU_buffer.size);

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);

 free(FU_buffer.data);
 FU_buffer.size = 0;
 }
 }
 
 }
 else 
 {
 //other type
 short NAL_type = type;
 if (NAL_type == 7) 
 {
 count++;
 //SPS
 }
 else if (NAL_type == 8) 
 {
 //PPS
 }
 else if (NAL_type == 10) 
 {
 //end of sequence
 }

 int size = packet.size - pos + 1;
 if (count > 0)
 {
 NALU.data = (unsigned char*)malloc(size+4);
 NALU.size = size + 4;
 memcpy(NALU.data, &inteval, 4);
 memcpy(NALU.data + 4, &buffer[pos - 1], size); // copy from the NAL header itself rather than a fixed offset, in case a header extension was present

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);
 }
 }
}

//------------------------------------------------------------------------------
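// Decoder thread worker: drains the NALU queue, runs each buffer through
// av_parser_parse2() to assemble complete packets, decodes them with
// avcodec_send_packet()/avcodec_receive_frame(), converts every decoded
// frame to RGB24 with swscale and hands the pixel buffer to ImgBufferSet().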
void Decode_NaluToAVFrameRGG()
{
 unsigned char cntNALU;
 int len = 0;
 static int size = 0;
 unsigned char* data = NULL;
 int ret;
 RTPData_DType NALU;

 av_frame_unref(pAVFrame);
 av_packet_unref(pAVPacket);

 while(1)
 {
 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 if (q.empty())
 {
 ReleaseMutex(hMutexRTPRecive);
 break;
 }
 NALU = q.front();
 q.pop();
 ReleaseMutex(hMutexRTPRecive);

 data = NALU.data;
 size = NALU.size;

 while(size)
 { 
 len = av_parser_parse2(pAVCodecParserContext, pAVCodecContext, &pAVPacket->data, &pAVPacket->size,
 (uint8_t*)data, size,
 AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

 data = len ? data + len : data;
 size -= len;
 
 if (pAVPacket->size)
 {
 ret = avcodec_send_packet(pAVCodecContext, pAVPacket);

 if(ret == AVERROR_EOF)
 MessageBox(NULL, L"the codec has been fully flushed, and there will be no more output frames", L"avcodec_send_packet", MB_OK | MB_ICONERROR);

 if (ret < 0)
 {
 MessageBox(NULL, L"ERROR sending a packet for decoding", L"Decode", MB_OK | MB_ICONERROR);
 //exit(1);
 }

 while (ret >= 0) 
 {
 ret = avcodec_receive_frame(pAVCodecContext, pAVFrame);
 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
 break;
 else if (ret < 0) {
 MessageBox(NULL, L"ERROR during decoding", L"Decode", MB_OK | MB_ICONERROR);
 //exit(1);
 }
 
 AVFrameRGG = av_frame_alloc(); 
 pSwsContext = sws_getContext(pAVCodecContext->width, pAVCodecContext->height, pAVCodecContext->pix_fmt,
 pAVCodecContext->width, pAVCodecContext->height, AV_PIX_FMT_RGB24, SWS_SPLINE,
 0, 0, 0);

 sws_scale_frame(pSwsContext, AVFrameRGG, pAVFrame);
 
 ImgBufferSet(AVFrameRGG->data[0], pAVCodecContext->height, pAVCodecContext->width, AVFrameRGG->linesize[0]);

 sws_freeContext(pSwsContext);
 av_frame_free(&AVFrameRGG);
 }
 }
 }
 free(NALU.data);
 NALU.size = 0;
 } 
}



SOLVED.