
Getting and decoding video by RTP H264
29 November 2023, by AlekseiKraev

Parsing RTP H.264. WinAPI, C. A queue from C++ has been used, I repent.


The RTP stream is generated with FFmpeg using the following command:
ffmpeg.exe -f gdigrab -framerate 25 -i desktop -s 853x480 -b:v 120000 -c:v libx264 -f rtp rtp://127.0.0.1:8080


Below, the incoming RTP/H.264 stream is parsed and converted to an RGB matrix.
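
For orientation before the full listing: the parser distinguishes the three RTP payload cases used for H.264 (RFC 6184): a single NAL unit packet, a STAP-A aggregation packet (type 24), and FU-A fragmentation units (type 28). A minimal sketch of that dispatch, with a hypothetical handle_nalu() standing in for the queueing done in the real code:

// Sketch only: how an RTP/H.264 payload is dispatched by NAL type (RFC 6184).
// handle_nalu() is a hypothetical consumer; the real code below prepends an
// Annex-B start code and pushes the NAL unit onto a queue instead.
#include <stdint.h>
#include <stddef.h>

static void handle_nalu(const uint8_t* nalu, size_t size) // hypothetical consumer
{
    (void)nalu; (void)size; // real code: prepend 00 00 00 01 and enqueue
}

static void dispatch_payload(const uint8_t* payload, size_t size)
{
    if (size < 2) return;
    uint8_t nal_type = payload[0] & 0x1F;        // low 5 bits of the payload header

    if (nal_type >= 1 && nal_type <= 23)         // single NAL unit packet
    {
        handle_nalu(payload, size);
    }
    else if (nal_type == 24)                     // STAP-A: several NALUs, each prefixed by a 16-bit big-endian size
    {
        size_t pos = 1;
        while (pos + 2 <= size)
        {
            uint16_t len = (uint16_t)((payload[pos] << 8) | payload[pos + 1]);
            pos += 2;
            if (pos + len > size) break;
            handle_nalu(payload + pos, len);
            pos += len;
        }
    }
    else if (nal_type == 28)                     // FU-A: fragments of one large NALU
    {
        // start flag: payload[1] & 0x80, end flag: payload[1] & 0x40;
        // reconstructed NAL header: (payload[0] & 0xE0) | (payload[1] & 0x1F);
        // fragments are appended until the end flag is seen (see RTPPacketParser below).
    }
}

The real code additionally prepends the four-byte Annex-B start code 00 00 00 01 to each NAL unit so that the FFmpeg parser can find NAL unit boundaries.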


video_H264_decode.h


#pragma once
#ifndef _VIDEO_H264_DECODE_SEND_H // guard against multiple inclusion of this header
#define _VIDEO_H264_DECODE_SEND_H
//******************************************************************************
// Section include
//******************************************************************************
#include "main.h"
#include <windows.h> // assumed: the original include was lost in formatting; Windows types (UCHAR) are used below
//******************************************************************************
// Constants
//******************************************************************************

//******************************************************************************
// Type
//******************************************************************************
typedef struct {
 unsigned char* data;
 int size;
}RTPData_DType;

typedef struct 
{
 union
 {
 struct
 {
 char V:2; //version
 char P:1; //padding
 char X:1; //extension
 char CC:4; //number of CSRCs

 char M:1; //marker (flag of the last packet of an access unit)
 char PT:7; //payload type (RTP payload media type, H264 = 96)

 short sequence_number; //sequence number: incremented by 1 for each RTP packet sent
 int time_stamp; //timestamp of the media sample
 int SSRC; //synchronization source: packets with the same SSRC share the same origin
 };
 unsigned char data[12];
 };
}RTPHeader_DType;
//******************************************************************************
// Global var
//******************************************************************************

//******************************************************************************
// Local function prototype
//******************************************************************************
UCHAR rtp_H264_recive_init(void);
UCHAR RTPStop(void);
//******************************************************************************
// Macros
//******************************************************************************
#define BYTE2_SWAP(X) ((((short)(X) & 0xff00) >> 8) |(((short)(X) & 0x00ff) << 8))
#endif
//******************************************************************************
// END OF FILE
//******************************************************************************
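
A caveat on RTPHeader_DType: the layout of C bit-fields and the byte order of sequence_number, time_stamp and SSRC are compiler-dependent, which is why the code byte-swaps sequence_number with BYTE2_SWAP. A portable alternative, shown here only as a sketch (it is not used in the project), is to extract the fields with shifts and masks straight from the 12-byte header:

// Sketch: portable RTP fixed-header parsing without bit-fields (RFC 3550).
// buf must point to at least 12 bytes of RTP data.
#include <stdint.h>

typedef struct {
    uint8_t  version, padding, extension, csrc_count, marker, payload_type;
    uint16_t sequence_number;
    uint32_t timestamp, ssrc;
} RTPFixedHeader;

static void rtp_parse_header(const uint8_t* buf, RTPFixedHeader* h)
{
    h->version         = (buf[0] >> 6) & 0x03;
    h->padding         = (buf[0] >> 5) & 0x01;
    h->extension       = (buf[0] >> 4) & 0x01;
    h->csrc_count      =  buf[0]       & 0x0F;
    h->marker          = (buf[1] >> 7) & 0x01;
    h->payload_type    =  buf[1]       & 0x7F;               // H.264 is typically 96 (dynamic)
    h->sequence_number = (uint16_t)((buf[2] << 8) | buf[3]); // network byte order
    h->timestamp       = ((uint32_t)buf[4] << 24) | ((uint32_t)buf[5] << 16)
                       | ((uint32_t)buf[6] << 8)  |  (uint32_t)buf[7];
    h->ssrc            = ((uint32_t)buf[8] << 24) | ((uint32_t)buf[9] << 16)
                       | ((uint32_t)buf[10] << 8) |  (uint32_t)buf[11];
}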



video_H264_decode.c


//******************************************************************************
//include
//******************************************************************************
#include "main.h"
/*#include "video_H264_decode.h"
#include 
#include 
#include <chrono>
#include 

#pragma comment(lib, "ws2_32.lib")
#include */
#include <iostream>
#include <queue>
#include <set> // needed for std::set below; WinSock, windows.h and strsafe.h are assumed to come in through main.h

extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libpostproc/postprocess.h"
#include "libavcodec/avcodec.h"
}

#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avdevice.lib")
#pragma comment(lib,"avfilter.lib")
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"postproc.lib")
#pragma comment(lib,"swresample.lib")
#pragma comment(lib,"swscale.lib")

#pragma warning(disable: 4996)
//******************************************************************************
// Section for determining the variables used in the module
//******************************************************************************
//------------------------------------------------------------------------------
// Global
//------------------------------------------------------------------------------

//------------------------------------------------------------------------------
// Local
//------------------------------------------------------------------------------
const int inteval = 0x01000000; // four bytes 00 00 00 01 in memory on a little-endian machine: the Annex-B start code prepended to every NAL unit
BOOL FlagRTPActive = TRUE;

HANDLE hMutexRTPRecive;
HANDLE hSemaphoreRTP;
HANDLE hTreadRTPRecive;
HANDLE hTreadRTPDecode;


SOCKET RTPSocket; //socket UDP RTP
RTPData_DType packet;
RTPData_DType FU_buffer = { 0 };

std::queue<RTPData_DType> q; // complete NAL units handed from the receive thread to the decode thread
std::set<int> seq;

AVFormatContext* pAVFormatContext;
AVCodecContext* pAVCodecContext;
const AVCodec* pAVCodec;
AVFrame* pAVFrame;
AVFrame* AVFrameRGG;
SwsContext* pSwsContext;
AVPacket *pAVPacket;
AVCodecParserContext* pAVCodecParserContext;

UINT port;
//******************************************************************************
// Section of prototypes of local functions
//******************************************************************************
DWORD WINAPI rtp_H264_recive_Procedure(CONST LPVOID lpParam);
DWORD WINAPI rtp_decode_Procedure(CONST LPVOID lpParam);
char RTPSocketInit(void);
void RTPPacketParser(void);
char RTPSocketRecive(void);
void Decode_NaluToAVFrameRGG();
//******************************************************************************
// Section of the description of functions
//******************************************************************************
UCHAR rtp_H264_recive_init(void)
{
 hSemaphoreRTP = CreateSemaphore(
 NULL, // default security attributes
 0, // initial count
 1, // maximum count
 NULL); // unnamed semaphore

 hMutexRTPRecive = CreateMutex(
 NULL, // default security attributes
 FALSE, // initially not owned
 NULL); // unnamed mutex

 hTreadRTPRecive = CreateThread(NULL, NULL, rtp_H264_recive_Procedure, NULL, NULL, NULL);
 hTreadRTPDecode = CreateThread(NULL, NULL, rtp_decode_Procedure, NULL, NULL, NULL);

 return 0;
}
//------------------------------------------------------------------------------
UCHAR RTPStop(void)
{
 FlagRTPActive = FALSE;

 if (hSemaphoreRTP) CloseHandle(hSemaphoreRTP);
 if (hMutexRTPRecive) CloseHandle(hMutexRTPRecive);
 if (hTreadRTPRecive) CloseHandle(hTreadRTPRecive);

 closesocket(RTPSocket); 

 return 0;
}
//------------------------------------------------------------------------------
DWORD WINAPI rtp_H264_recive_Procedure(CONST LPVOID lpParam)
{
 while (RTPSocketInit() == 0)
 Sleep(2000);
 
 while (1)
 {
 RTPSocketRecive();
 RTPPacketParser();
 ReleaseSemaphore(hSemaphoreRTP, 1, NULL);
 }
}
//------------------------------------------------------------------------------
DWORD WINAPI rtp_decode_Procedure(CONST LPVOID lpParam)
{
 port = param.Option.VideoPort;

 pAVPacket = av_packet_alloc();
 if (!pAVPacket)
 {
 MessageBox(NULL, L"ERROR Could not allocate pAVPacket", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }
 av_init_packet(pAVPacket);

 /* find the H.264 video decoder */
 pAVCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
 if (!pAVCodec)
 {
 MessageBox(NULL, L"ERROR Codec not found", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVCodecParserContext = av_parser_init(pAVCodec->id);
 if (!pAVCodecParserContext)
 {
 MessageBox(NULL, L"ERROR Parser not found", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVCodecContext = avcodec_alloc_context3(pAVCodec);
 if (!pAVCodecContext) 
 {
 MessageBox(NULL, L"ERROR Could not allocate video codec context", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }
 
 if (avcodec_open2(pAVCodecContext, pAVCodec, NULL) < 0)
 {
 MessageBox(NULL, L"ERROR Could not open codec", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 }

 pAVFrame = av_frame_alloc();
 if (!pAVFrame) 
 {
 MessageBox(NULL, L"ERROR Could not allocate video frame", L"Init decoder error", MB_OK | MB_ICONERROR);
 exit(1);
 } 
 
 while (FlagRTPActive)
 {
 if(port != param.Option.VideoPort)
 closesocket(RTPSocket);

 WaitForSingleObject(hSemaphoreRTP, 500); 
 Decode_NaluToAVFrameRGG();
 }

 avformat_free_context(pAVFormatContext);
 av_frame_free(&pAVFrame);
 avcodec_close(pAVCodecContext);
 av_packet_free(&pAVPacket);

 
 if (hTreadRTPDecode) CloseHandle(hTreadRTPDecode);

 return 0;
}

//------------------------------------------------------------------------------
char RTPSocketInit(void)
{ 
 sockaddr_in RTPSocketAddr;
 
 RTPSocketAddr.sin_family = AF_INET;
 RTPSocketAddr.sin_addr.s_addr = htonl(INADDR_ANY);
 RTPSocketAddr.sin_port = htons(param.Option.VideoPort);

 RTPSocket = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
 if (RTPSocket == INVALID_SOCKET)
 {
 MessageBox(NULL, L"ERROR Invalid RTP Socket", L"UDP Socket", MB_OK | MB_ICONERROR);
 return 0;
 }

 int option = 12000;
 if (setsockopt(RTPSocket, SOL_SOCKET, SO_RCVBUF, (char*)&option, sizeof(option)) < 0)
 {
 printf("setsockopt failed\n");

 }

 /*option = TRUE;
 if (setsockopt(RTPSocket, SOL_SOCKET, SO_CONDITIONAL_ACCEPT, (char*)&option, sizeof(option)) < 0)
 {
 printf("setsockopt failed\n");

 }*/

 if (bind(RTPSocket, (sockaddr*)&RTPSocketAddr, sizeof(RTPSocketAddr)) == SOCKET_ERROR)
 {
 MessageBox(NULL, L"ERROR bind", L"UDP Socket", MB_OK | MB_ICONERROR);
 closesocket(RTPSocket);
 return 0;
 }
 return 1;
}

//------------------------------------------------------------------------------
char RTPSocketRecive(void)
{
 static char RecvBuf[2000];
 static int BufLen = 2000;

 int iResult = 0;
 struct sockaddr_in SenderAddr;
 int SenderAddrSize = sizeof(SenderAddr);
 TCHAR szErrorMsg[100];

 iResult = recvfrom(RTPSocket, RecvBuf, BufLen, 0, NULL, NULL);
 if (iResult == SOCKET_ERROR && FlagRTPActive == TRUE)
 {
 StringCchPrintf(szErrorMsg, 100, L"ERROR recvfrom Ошибка:%d", WSAGetLastError());
 MessageBox(NULL, szErrorMsg, L"UDP Socket", MB_OK | MB_ICONERROR);
 return -1;
 }

 packet.data = (unsigned char*)RecvBuf;
 packet.size = iResult;

 return 1;
}

//------------------------------------------------------------------------------
void RTPPacketParser(void)
{
 RTPHeader_DType RTPHeader;
 RTPData_DType NALU;
 unsigned char* buffer = packet.data;
 int pos = 0;
 static int count = 0;
 static short lastSequenceNumber = 0xFFFF;
 short type;
 char payload_header;

 //read rtp header
 memcpy(&RTPHeader, buffer, sizeof(RTPHeader));
 RTPHeader.sequence_number = BYTE2_SWAP(RTPHeader.sequence_number);
 pos += 12; 
 
 if (RTPHeader.X) {
 //profile extension
 short define;
 short length;
 length = buffer[pos + 3]; // assume a short extension: the length fits in one byte
 pos += 4;
 pos += (length * 4);
 }

 payload_header = buffer[pos];
 type = payload_header & 0x1f; // NAL unit type (low five bits of the RTP payload header)
 pos++;
 
 //STAP-A
 if (type == 24)
 { 
 while (pos < packet.size)
 {
 unsigned short NALU_size;
 memcpy(&NALU_size, buffer + pos, 2);
 NALU_size = BYTE2_SWAP(NALU_size);
 pos += 2;
 char NAL_header = buffer[pos];
 short NAL_type = NAL_header & 0x1f;

 if (NAL_type == 7) 
 {
 count++;
 //cout<<"SPS, sequence number: "<<RTPHeader.sequence_number<<endl;
 }
 else if (NAL_type == 8) 
 {
 //cout<<"PPS, sequence number: "<<RTPHeader.sequence_number<<endl;
 }
 else if (NAL_type == 10) 
 {
 //cout<<"end of sequence, sequence number: "<<RTPHeader.sequence_number<<endl;
 }

 if (count > 0) 
 {
 NALU.data = (unsigned char*) malloc(NALU_size + 4);
 NALU.size = NALU_size + 4;
 memcpy(NALU.data, &inteval, 4);
 memcpy(NALU.data + 4, &buffer[pos], NALU_size);

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);
 }

 pos += NALU_size;
 }
 }
 //FU-A Fragmentation unit
 else if (type == 28)
 { 
 //FU header
 char FU_header = buffer[pos];
 bool fStart = FU_header & 0x80;
 bool fEnd = FU_header & 0x40;

 //NAL header
 char NAL_header = (payload_header & 0xe0) | (FU_header & 0x1f);
 short NAL_type = FU_header & 0x1f;
 if (NAL_type == 7) 
 {
 count++;
 //SPS
 }
 else if (NAL_type == 8) 
 {
 //PPS
 }
 else if (NAL_type == 10) 
 {
 //end of sequence
 }
 pos++;

 int size = packet.size - pos;
 
 if (count > 0)
 {
 if (fStart) 
 {
 if (FU_buffer.size != 0)
 {
 free(FU_buffer.data);
 FU_buffer.size = 0;
 }
 FU_buffer.data = (unsigned char*)malloc(size + 5);
 if (FU_buffer.data == NULL)
 return;
 FU_buffer.size = size + 5;
 memcpy(FU_buffer.data, &inteval, 4);
 memcpy(FU_buffer.data + 4, &NAL_header, 1);
 memcpy(FU_buffer.data + 5, buffer + pos, size);
 }
 else
 {
 unsigned char* temp = (unsigned char*)malloc(FU_buffer.size + size);
 memcpy(temp, FU_buffer.data, FU_buffer.size);
 memcpy(temp + FU_buffer.size, buffer + pos, size);
 if (FU_buffer.size != 0) free(FU_buffer.data);
 FU_buffer.data = temp;
 FU_buffer.size += size;
 }

 if (fEnd)
 {
 NALU.data = (unsigned char*)malloc(FU_buffer.size);
 NALU.size = FU_buffer.size;
 memcpy(NALU.data, FU_buffer.data, FU_buffer.size);

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);

 free(FU_buffer.data);
 FU_buffer.size = 0;
 }
 }
 
 }
 else 
 {
 //other type
 short NAL_type = type;
 if (NAL_type == 7) 
 {
 count++;
 //SPS
 }
 else if (NAL_type == 8) 
 {
 //PPS
 }
 else if (NAL_type == 10) 
 {
 //end of sequence
 }

 int size = packet.size - pos + 1;
 if (count > 0)
 {
 NALU.data = (unsigned char*)malloc(size+4);
 NALU.size = size + 4;
 memcpy(NALU.data, &inteval, 4);
 memcpy(NALU.data + 4, &buffer[pos - 1], size); // pos - 1 points at the NAL header (offset 12 when there is no RTP header extension)

 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 q.push(NALU);
 ReleaseMutex(hMutexRTPRecive);
 }
 }
}

//------------------------------------------------------------------------------
void Decode_NaluToAVFrameRGG()
{
 unsigned char cntNALU;
 int len = 0;
 static int size = 0;
 unsigned char* data = NULL;
 int ret;
 RTPData_DType NALU;

 av_frame_unref(pAVFrame);
 av_packet_unref(pAVPacket);

 while(1)
 {
 WaitForSingleObject(hMutexRTPRecive, INFINITE);
 if (q.empty())
 {
 ReleaseMutex(hMutexRTPRecive);
 break;
 }
 NALU = q.front();
 q.pop();
 ReleaseMutex(hMutexRTPRecive);

 data = NALU.data;
 size = NALU.size;

 while(size)
 { 
 len = av_parser_parse2(pAVCodecParserContext, pAVCodecContext, &pAVPacket->data, &pAVPacket->size,
 (uint8_t*)data, size,
 AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

 data = len ? data + len : data;
 size -= len;
 
 if (pAVPacket->size)
 {
 ret = avcodec_send_packet(pAVCodecContext, pAVPacket);

 if(ret == AVERROR_EOF)
 MessageBox(NULL, L"the codec has been fully flushed, and there will be no more output frames", L"avcodec_send_packet", MB_OK | MB_ICONERROR);

 if (ret < 0)
 {
 MessageBox(NULL, L"ERROR sending a packet for decoding", L"Decode", MB_OK | MB_ICONERROR);
 //exit(1);
 }

 while (ret >= 0) 
 {
 ret = avcodec_receive_frame(pAVCodecContext, pAVFrame);
 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
 break;
 else if (ret < 0) {
 MessageBox(NULL, L"ERROR during decoding", L"Decode", MB_OK | MB_ICONERROR);
 break; // do not use pAVFrame after a failed receive
 }
 
 AVFrameRGG = av_frame_alloc(); 
 pSwsContext = sws_getContext(pAVCodecContext->width, pAVCodecContext->height, pAVCodecContext->pix_fmt,
 pAVCodecContext->width, pAVCodecContext->height, AV_PIX_FMT_RGB24, SWS_SPLINE,
 0, 0, 0);

 sws_scale_frame(pSwsContext, AVFrameRGG, pAVFrame);
 
 ImgBufferSet(AVFrameRGG->data[0], pAVCodecContext->height, pAVCodecContext->width, AVFrameRGG->linesize[0]);

 sws_freeContext(pSwsContext);
 av_frame_free(&AVFrameRGG);
 }
 }
 }
 free(NALU.data);
 NALU.size = 0;
 } 
}
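
For completeness, a hypothetical sketch of how the host application could drive this module. main.h, param and ImgBufferSet are application-side pieces that are not shown in the post, and WinSock is assumed to be initialised before the receive thread opens its socket:

// Hypothetical usage sketch of the module above.
// Assumes main.h provides param.Option.VideoPort and ImgBufferSet(),
// and pulls in the WinSock headers needed for WSAStartup()/WSACleanup().
#include "main.h"
#include "video_H264_decode.h"

int video_start(void)
{
    WSADATA wsa;
    if (WSAStartup(MAKEWORD(2, 2), &wsa) != 0)   // required before any socket call
        return -1;

    rtp_H264_recive_init();                      // spawns the receive and decode threads
    return 0;
}

void video_stop(void)
{
    RTPStop();                                   // closes the handles and the UDP socket
    WSACleanup();
}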



SOLVED

