
Other articles (96)
-
MediaSPIP 0.1 Beta version
25 April 2011 - MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to carry out other manual (...) -
Multilang: improving the interface for multilingual blocks
18 February 2011 - Multilang is an additional plugin that is not enabled by default when MediaSPIP is initialised.
Once it is activated, MediaSPIP init automatically puts a preconfiguration in place so that the new feature is immediately operational. There is therefore no mandatory configuration step for this. -
APPENDIX: The plugins used specifically for the farm
5 March 2010 - The central/master site of the farm needs several additional plugins, beyond those of the channels, to work properly: the Gestion de la mutualisation plugin; the inscription3 plugin, to manage registrations and requests to create a shared-hosting instance as soon as users sign up; the verifier plugin, which provides a field-validation API (used by inscription3); the champs extras v2 plugin, required by inscription3 (...)
On other sites (10925)
-
Getting green screen in ffplay: Streaming desktop (DirectX surface) as H264 video over RTP stream using Live555
7 November 2019, by Ram - I'm trying to stream the desktop (a DirectX surface in NV12 format) as H264 video over an RTP stream, using Live555 and Windows Media Foundation's hardware encoder on Windows 10, and expecting it to be rendered by ffplay (ffmpeg 4.2). But I'm only getting a green screen (screenshot not shown).
I followed the MFWebCamToRTP Media Foundation sample and "Encoding DirectX surface using hardware MFT" to implement Live555's FramedSource, changing the input source to a DirectX surface instead of the webcam.
Here is an excerpt of my implementation of Live555's doGetNextFrame callback, which feeds input samples from the DirectX surface:
virtual void doGetNextFrame()
{
if (!_isInitialised)
{
if (!initialise()) {
printf("Video device initialisation failed, stopping.");
return;
}
else {
_isInitialised = true;
}
}
//if (!isCurrentlyAwaitingData()) return;
DWORD processOutputStatus = 0;
HRESULT mftProcessOutput = S_OK;
MFT_OUTPUT_STREAM_INFO StreamInfo;
IMFMediaBuffer *pBuffer = NULL;
IMFSample *mftOutSample = NULL;
DWORD mftOutFlags;
bool frameSent = false;
bool bTimeout = false;
// Create sample
CComPtr<IMFSample> videoSample = NULL;
// Create buffer
CComPtr<IMFMediaBuffer> inputBuffer;
// Get next event
CComPtr<IMFMediaEvent> event;
HRESULT hr = eventGen->GetEvent(0, &event);
CHECK_HR(hr, "Failed to get next event");
MediaEventType eventType;
hr = event->GetType(&eventType);
CHECK_HR(hr, "Failed to get event type");
switch (eventType)
{
case METransformNeedInput:
{
hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), surface, 0, FALSE, &inputBuffer);
CHECK_HR(hr, "Failed to create IMFMediaBuffer");
hr = MFCreateSample(&videoSample);
CHECK_HR(hr, "Failed to create IMFSample");
hr = videoSample->AddBuffer(inputBuffer);
CHECK_HR(hr, "Failed to add buffer to IMFSample");
if (videoSample)
{
_frameCount++;
CHECK_HR(videoSample->SetSampleTime(mTimeStamp), "Error setting the video sample time.\n");
CHECK_HR(videoSample->SetSampleDuration(VIDEO_FRAME_DURATION), "Error setting the video sample duration.\n");
// Pass the video sample to the H.264 transform.
hr = _pTransform->ProcessInput(inputStreamID, videoSample, 0);
CHECK_HR(hr, "The resampler H264 ProcessInput call failed.\n");
mTimeStamp += VIDEO_FRAME_DURATION;
}
}
break;
case METransformHaveOutput:
{
CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");
if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
{
MFT_OUTPUT_DATA_BUFFER _outputDataBuffer;
memset(&_outputDataBuffer, 0, sizeof _outputDataBuffer);
_outputDataBuffer.dwStreamID = outputStreamID;
_outputDataBuffer.dwStatus = 0;
_outputDataBuffer.pEvents = NULL;
_outputDataBuffer.pSample = nullptr;
mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);
if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
{
if (_outputDataBuffer.pSample) {
//CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(mTimeStamp), "Error setting MFT sample time.\n");
//CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(VIDEO_FRAME_DURATION), "Error setting MFT sample duration.\n");
IMFMediaBuffer *buf = NULL;
DWORD bufLength;
CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");
BYTE * rawBuffer = NULL;
fFrameSize = bufLength;
fDurationInMicroseconds = 0;
gettimeofday(&fPresentationTime, NULL);
buf->Lock(&rawBuffer, NULL, NULL);
memmove(fTo, rawBuffer, fFrameSize);
FramedSource::afterGetting(this);
buf->Unlock();
SafeRelease(&buf);
frameSent = true;
_lastSendAt = GetTickCount();
_outputDataBuffer.pSample->Release();
}
if (_outputDataBuffer.pEvents)
_outputDataBuffer.pEvents->Release();
}
//SafeRelease(&pBuffer);
//SafeRelease(&mftOutSample);
break;
}
}
break;
}
if (!frameSent)
{
envir().taskScheduler().triggerEvent(eventTriggerId, this);
}
return;
done:
printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
envir().taskScheduler().triggerEvent(eventTriggerId, this);
}
Initialise method:
bool initialise()
{
HRESULT hr;
D3D11_TEXTURE2D_DESC desc = { 0 };
HDESK CurrentDesktop = nullptr;
CurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
if (!CurrentDesktop)
{
// We do not have access to the desktop so request a retry
return false;
}
// Attach desktop to this thread
bool DesktopAttached = SetThreadDesktop(CurrentDesktop) != 0;
CloseDesktop(CurrentDesktop);
CurrentDesktop = nullptr;
if (!DesktopAttached)
{
printf("SetThreadDesktop failed\n");
}
UINT32 activateCount = 0;
// h264 output
MFT_REGISTER_TYPE_INFO info = { MFMediaType_Video, MFVideoFormat_H264 };
UINT32 flags =
MFT_ENUM_FLAG_HARDWARE |
MFT_ENUM_FLAG_SORTANDFILTER;
// ------------------------------------------------------------------------
// Initialize D3D11
// ------------------------------------------------------------------------
// Driver types supported
D3D_DRIVER_TYPE DriverTypes[] =
{
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
};
UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
// Feature levels supported
D3D_FEATURE_LEVEL FeatureLevels[] =
{
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_1
};
UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
D3D_FEATURE_LEVEL FeatureLevel;
// Create device
for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
{
hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr,
D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
FeatureLevels, NumFeatureLevels, D3D11_SDK_VERSION, &device, &FeatureLevel, &context);
if (SUCCEEDED(hr))
{
// Device creation success, no need to loop anymore
break;
}
}
CHECK_HR(hr, "Failed to create device");
// Create device manager
UINT resetToken;
hr = MFCreateDXGIDeviceManager(&resetToken, &deviceManager);
CHECK_HR(hr, "Failed to create DXGIDeviceManager");
hr = deviceManager->ResetDevice(device, resetToken);
CHECK_HR(hr, "Failed to assign D3D device to device manager");
// ------------------------------------------------------------------------
// Create surface
// ------------------------------------------------------------------------
desc.Format = DXGI_FORMAT_NV12;
desc.Width = surfaceWidth;
desc.Height = surfaceHeight;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.SampleDesc.Count = 1;
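// NOTE: pInitialData is NULL below and nothing in this excerpt ever copies
// desktop pixels into 'surface', so the encoder is fed whatever the fresh
// texture memory happens to contain - an all-zero NV12 frame decodes to
// solid green.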
hr = device->CreateTexture2D(&desc, NULL, &surface);
CHECK_HR(hr, "Could not create surface");
hr = MFTEnumEx(
MFT_CATEGORY_VIDEO_ENCODER,
flags,
NULL,
&info,
&activateRaw,
&activateCount
);
CHECK_HR(hr, "Failed to enumerate MFTs");
CHECK(activateCount, "No MFTs found");
// Choose the first available encoder
activate = activateRaw[0];
for (UINT32 i = 0; i < activateCount; i++)
activateRaw[i]->Release();
// Activate
hr = activate->ActivateObject(IID_PPV_ARGS(&_pTransform));
CHECK_HR(hr, "Failed to activate MFT");
// Get attributes
hr = _pTransform->GetAttributes(&attributes);
CHECK_HR(hr, "Failed to get MFT attributes");
// Unlock the transform for async use and get event generator
hr = attributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
CHECK_HR(hr, "Failed to unlock MFT");
eventGen = _pTransform;
CHECK(eventGen, "Failed to QI for event generator");
// Get stream IDs (expect 1 input and 1 output stream)
hr = _pTransform->GetStreamIDs(1, &inputStreamID, 1, &outputStreamID);
if (hr == E_NOTIMPL)
{
inputStreamID = 0;
outputStreamID = 0;
hr = S_OK;
}
CHECK_HR(hr, "Failed to get stream IDs");
// ------------------------------------------------------------------------
// Configure hardware encoder MFT
// ------------------------------------------------------------------------
CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, reinterpret_cast<ULONG_PTR>(deviceManager.p)), "Failed to set device manager.\n");
// Set low latency hint
hr = attributes->SetUINT32(MF_LOW_LATENCY, TRUE);
CHECK_HR(hr, "Failed to set MF_LOW_LATENCY");
hr = MFCreateMediaType(&outputType);
CHECK_HR(hr, "Failed to create media type");
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
CHECK_HR(hr, "Failed to set MF_MT_MAJOR_TYPE on H264 output media type");
hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
CHECK_HR(hr, "Failed to set MF_MT_SUBTYPE on H264 output media type");
hr = outputType->SetUINT32(MF_MT_AVG_BITRATE, TARGET_AVERAGE_BIT_RATE);
CHECK_HR(hr, "Failed to set average bit rate on H264 output media type");
hr = MFSetAttributeSize(outputType, MF_MT_FRAME_SIZE, desc.Width, desc.Height);
CHECK_HR(hr, "Failed to set frame size on H264 MFT out type");
hr = MFSetAttributeRatio(outputType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1);
CHECK_HR(hr, "Failed to set frame rate on H264 MFT out type");
hr = outputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); // = 2
CHECK_HR(hr, "Failed to set MF_MT_INTERLACE_MODE on H.264 encoder MFT");
hr = outputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
CHECK_HR(hr, "Failed to set MF_MT_ALL_SAMPLES_INDEPENDENT on H.264 encoder MFT");
hr = _pTransform->SetOutputType(outputStreamID, outputType, 0);
CHECK_HR(hr, "Failed to set output media type on H.264 encoder MFT");
hr = MFCreateMediaType(&inputType);
CHECK_HR(hr, "Failed to create media type");
for (DWORD i = 0;; i++)
{
inputType = nullptr;
hr = _pTransform->GetInputAvailableType(inputStreamID, i, &inputType);
CHECK_HR(hr, "Failed to get input type");
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
CHECK_HR(hr, "Failed to set MF_MT_MAJOR_TYPE on H264 MFT input type");
hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
CHECK_HR(hr, "Failed to set MF_MT_SUBTYPE on H264 MFT input type");
hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, desc.Width, desc.Height);
CHECK_HR(hr, "Failed to set MF_MT_FRAME_SIZE on H264 MFT input type");
hr = MFSetAttributeRatio(inputType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1);
CHECK_HR(hr, "Failed to set MF_MT_FRAME_RATE on H264 MFT input type");
hr = _pTransform->SetInputType(inputStreamID, inputType, 0);
CHECK_HR(hr, "Failed to set input type");
break;
}
CheckHardwareSupport();
CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n");
CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n");
CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n");
return true;
done:
printf("MediaFoundationH264LiveSource initialisation failed.\n");
return false;
}
HRESULT CheckHardwareSupport()
{
IMFAttributes *attributes;
HRESULT hr = _pTransform->GetAttributes(&attributes);
UINT32 dxva = 0;
if (SUCCEEDED(hr))
{
hr = attributes->GetUINT32(MF_SA_D3D11_AWARE, &dxva);
}
if (SUCCEEDED(hr))
{
hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
}
#if defined(CODECAPI_AVLowLatencyMode) // Win8 only
hr = _pTransform->QueryInterface(IID_PPV_ARGS(&mpCodecAPI));
if (SUCCEEDED(hr))
{
VARIANT var = { 0 };
// FIXME: encoder only
var.vt = VT_UI4;
var.ulVal = 0;
hr = mpCodecAPI->SetValue(&CODECAPI_AVEncMPVDefaultBPictureCount, &var);
var.vt = VT_BOOL;
var.boolVal = VARIANT_TRUE;
hr = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonLowLatency, &var);
hr = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonRealTime, &var);
hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
if (SUCCEEDED(hr))
{
var.vt = VT_UI4;
var.ulVal = eAVEncCommonRateControlMode_Quality;
hr = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
// This property controls the quality level when the encoder is not using a constrained bit rate. The AVEncCommonRateControlMode property determines whether the bit rate is constrained.
VARIANT quality;
InitVariantFromUInt32(50, &quality);
hr = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonQuality, &quality);
}
}
#endif
return hr;
}
ffplay command:
ffplay -protocol_whitelist file,udp,rtp -i test.sdp -x 800 -y 600 -profile:v baseline
SDP:
v=0
o=- 0 0 IN IP4 127.0.0.1
s=No Name
t=0 0
c=IN IP4 127.0.0.1
m=video 1234 RTP/AVP 96
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1
I don't know what I'm missing; I have been trying to fix this for almost a week without any progress and have tried almost everything I could. The online resources on encoding a DirectX surface as video are also very limited.
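A generic way to narrow a problem like this down - a debugging sketch, not part of the original code; the #include, the file name and the placement comment are additions - is to dump the encoder's output to a raw Annex B file at the point where the contiguous buffer is locked, and play that file directly. If the dump is already green, Live555, the SDP and ffplay's RTP handling can be ruled out:
#include <fstream> // for the debug dump below

// ... inside the METransformHaveOutput branch, immediately after
// buf->Lock(&rawBuffer, NULL, NULL);
static std::ofstream h264Dump("dump.h264", std::ios::binary); // hypothetical debug file
h264Dump.write(reinterpret_cast<const char *>(rawBuffer), bufLength);
h264Dump.flush();
The resulting file can then be inspected offline with ffplay dump.h264.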
Any help would be appreciated.
-
C++ - How to capture MJPEG stream images from an IP camera (not the H264 stream) [on hold]
25 July 2017, by ngân phạm - I know that many IP cameras support both image streaming (e.g. MJPEG) and H264 video streaming. I also know how to use OpenCV to capture H264 video. But I don't know whether OpenCV's VideoCapture class can also capture images from an MJPEG stream, or whether I have to use another library such as FFmpeg or libVLC. My camera is a HIKvision DS-2CD2T42FWD-I8.
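For reference, OpenCV's VideoCapture, when built with the FFmpeg backend, can usually open an HTTP MJPEG URL directly, so a separate library is not strictly required. A minimal sketch - not from the original question; the URL, credentials and path are hypothetical and depend on the camera's firmware:
#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
    // HIKvision cameras typically expose an MJPEG substream over HTTP;
    // the exact path here is a guess - check the camera's documentation.
    cv::VideoCapture cap("http://admin:password@192.168.1.64/Streaming/channels/102/httpPreview");
    if (!cap.isOpened()) {
        std::cerr << "Failed to open MJPEG stream\n";
        return 1;
    }
    cv::Mat frame;
    while (cap.read(frame))              // each read() yields one decoded JPEG frame
    {
        cv::imshow("mjpeg", frame);
        if (cv::waitKey(1) == 27) break; // ESC to quit
    }
    return 0;
}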
Thanks in advance -
FFmpeg live streaming to YouTube - "Connect streaming software to start preview"
10 September 2019, by Zoltan Fedor - I have an RTSP feed (H.265) that I am trying to stream to YouTube Live, but whatever I do, YouTube Studio just shows the waiting icon with "Connect streaming software to start preview".
.sh file for streaming:
VBR="1000k"
FPS="30"
QUAL="ultrafast"
YOUTUBE_URL="rtmp://b.rtmp.youtube.com/live2"
SOURCE="rtsp://qhatever.org:8555/whatever.sdp"
KEY="xxxx-xxxx-xxxx-xxxx"
~/ffmpeg-git-20190905-amd64-static/ffmpeg \
-thread_queue_size 512 \
-rtsp_transport tcp -i "$SOURCE" \
-f lavfi -i anullsrc \
-vcodec libx264 -pix_fmt yuvj420p -preset $QUAL -r $FPS -g $(($FPS * 2)) -b:v 2250k -minrate $VBR -maxrate 6000k -bufsize 6000k -keyint_min 60 \
-acodec libmp3lame -ar 44100 -b:a 128k \
-f flv "$YOUTUBE_URL/$KEY"
FFmpeg's output:
ffmpeg version N-49800-g2b66c757d6-static https://johnvansickle.com/ffmpeg/ Copyright (c) 2000-2019 the FFmpeg developers
built with gcc 6.3.0 (Debian 6.3.0-18+deb9u1) 20170516
configuration: --enable-gpl --enable-version3 --enable-static --disable-debug --disable-ffplay --disable-indev=sndio --disable-outdev=sndio --cc=gcc-6 --enable-fontconfig --enable-frei0r --enable-gnutls --enable-gmp --enable-libgme --enable-gray --enable-libaom --enable-libfribidi --enable-libass --enable-libvmaf --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librubberband --enable-libsoxr --enable-libspeex --enable-libsrt --enable-libvorbis --enable-libopus --enable-libtheora --enable-libvidstab --enable-libvo-amrwbenc --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libdav1d --enable-libxvid --enable-libzvbi --enable-libzimg
libavutil 56. 35.100 / 56. 35.100
libavcodec 58. 56.101 / 58. 56.101
libavformat 58. 32.104 / 58. 32.104
libavdevice 58. 9.100 / 58. 9.100
libavfilter 7. 58.102 / 7. 58.102
libswscale 5. 6.100 / 5. 6.100
libswresample 3. 6.100 / 3. 6.100
libpostproc 55. 6.100 / 55. 6.100
Input #0, rtsp, from 'rtsp://whatever.org:8555/whatever.sdp':
Metadata:
title : streamed by the Rtsp Server
comment : RTSP_STREAM_0
Duration: N/A, start: 0.000000, bitrate: N/A
Stream #0:0: Video: hevc (Main), yuvj420p(pc, bt709), 1920x1080, 30 fps, 30 tbr, 90k tbn, 30 tbc
Stream #0:1: Data: none
Input #1, lavfi, from 'anullsrc':
Duration: N/A, start: 0.000000, bitrate: 705 kb/s
Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
Stream mapping:
Stream #0:0 -> #0:0 (hevc (native) -> h264 (libx264))
Stream #1:0 -> #0:1 (pcm_u8 (native) -> mp3 (libmp3lame))
Press [q] to stop, [?] for help
[hevc @ 0x5fbfd80] Could not find ref with POC 4
[libx264 @ 0x5fb9d80] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
[libx264 @ 0x5fb9d80] profile Constrained Baseline, level 4.0, 4:2:0, 8-bit
[libx264 @ 0x5fb9d80] 264 - core 157 r2969 d4099dd - H.264/MPEG-4 AVC codec - Copyleft 2003-2019 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=3 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=60 keyint_min=31 scenecut=0 intra_refresh=0 rc_lookahead=0 rc=abr mbtree=0 bitrate=2250 ratetol=1.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 vbv_maxrate=6000 vbv_bufsize=6000 nal_hrd=none filler=0 ip_ratio=1.40 aq=0
Output #0, flv, to 'rtmp://b.rtmp.youtube.com/live2/xxxx-xxxx-xxxx-xxxx':
Metadata:
title : streamed by the Rtsp Server
comment : RTSP_STREAM_0
encoder : Lavf58.32.104
Stream #0:0: Video: h264 (libx264) ([7][0][0][0] / 0x0007), yuvj420p(pc, progressive), 1920x1080, q=-1--1, 2250 kb/s, 30 fps, 1k tbn, 30 tbc
Metadata:
encoder : Lavc58.56.101 libx264
Side data:
cpb: bitrate max/min/avg: 6000000/0/2250000 buffer size: 6000000 vbv_delay: N/A
Stream #0:1: Audio: mp3 (libmp3lame) ([2][0][0][0] / 0x0002), 44100 Hz, stereo, s16p, 128 kb/s
Metadata:
encoder : Lavc58.56.101 libmp3lame
frame= 522 fps=31 q=19.0 size= 8802kB time=00:00:36.53 bitrate=2481.0kbits/s dup= drop=2 speed=1.01x
Basically everything looks fine to me - still, in YouTube Studio only the waiting icon with the "Connect streaming software to start preview" message is visible, with no video feed.
Any ideas?
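A generic sanity check - not something the post describes trying - is to point the same command at a local file, replacing -f flv "$YOUTUBE_URL/$KEY" with -f flv test.flv, and confirm the recording plays back; if it does, the encoding side is fine and attention can shift to the RTMP URL and the stream key.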