
Media (1)
-
Bee video in portrait orientation
14 May 2011
Updated: February 2012
Language: French
Type: Video
Other articles (77)
-
Possible deployments
31 January 2010. Two types of deployment are possible, depending on two aspects: the installation method chosen (standalone or as a farm), and the expected number of daily encodings and level of traffic.
Video encoding is a heavy process that consumes a great deal of system resources (CPU and RAM), so all of this must be taken into account. The system is therefore only feasible on one or more dedicated servers.
Single-server version
The single-server version consists of using only one (...) -
Changing your graphic theme
22 February 2011. The graphic theme does not change the actual layout of elements on the page; it only alters their appearance.
The placement of elements can in fact be changed, but that change is purely visual and does not affect the semantic structure of the page.
Changing the active graphic theme
To change the graphic theme in use, the zen-garden plugin must be enabled on the site.
Then simply go to the configuration area of the (...) -
Adding users manually as an administrator
12 April 2011. The administrator of a channel can add one or more users at any time from the site's configuration area by choosing the "Gestion des utilisateurs" (user management) submenu.
On this page it is possible to:
1. decide how users register, via two options: accept registration by visitors of the public site, or refuse visitor registration;
2. add, modify, or delete a user.
Using the second form on the page, an administrator can add, (...)
On other sites (12712)
-
NV12 textures not working in DirectX 11.1
28 March 2017, by André Vitor. I'm trying to render NV12 textures from frames decoded with ffmpeg 2.8.11 using DirectX 11.1, but when I render them the texture is broken and the color is always off.
The result is: http://imgur.com/a/YIVQk
The code below is how I take the frame decoded by ffmpeg, which is in YUV420P format, and then convert it (I'm not sure correctly) to NV12 by interleaving the U and V planes.
static uint8_t *pixelsPtr_ = nullptr;
// Y-plane row pitch (width rounded up to an even value) and the total buffer
// size: the Y plane plus half as much again for the chroma data.
UINT rowPitch = ((width + 1) >> 1) * 2;
UINT imageSize = (rowPitch * height) + ((rowPitch * height + 1) >> 1);
if (!pixelsPtr_)
{
pixelsPtr_ = new uint8_t[imageSize];
}
int j, position = 0;
uint32_t pitchY = avFrame.linesize[0];
uint32_t pitchU = avFrame.linesize[1];
uint32_t pitchV = avFrame.linesize[2];
uint8_t *avY = avFrame.data[0];
uint8_t *avU = avFrame.data[1];
uint8_t *avV = avFrame.data[2];
::SecureZeroMemory(pixelsPtr_, imageSize);
// Copy the Y plane row by row; linesize may include padding beyond width.
for (j = 0; j < height; j++)
{
::CopyMemory(pixelsPtr_ + position, avY, (width));
position += (width);
avY += pitchY;
}
// Copies one full U row, then one full V row, per iteration: the chroma rows
// end up alternating, whereas NV12 wants U and V interleaved byte by byte.
for (j = 0; j < height >> 1; j++)
{
::CopyMemory(pixelsPtr_ + position, avU, (width >> 1));
position += (width >> 1);
avU += pitchU;
::CopyMemory(pixelsPtr_ + position, avV, (width >> 1));
position += (width >> 1);
avV += pitchV;
}
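Note that NV12 expects a single chroma plane of height/2 rows in which U and V bytes alternate (UVUV...), rather than whole U and V rows alternating as above. A minimal sketch of such an interleaved copy, assuming an even width and reusing the pointers and pitches defined above:
// NV12 chroma plane: height/2 rows, each width bytes, with U and V alternating
for (j = 0; j < height >> 1; j++)
{
    for (int i = 0; i < width >> 1; i++)
    {
        pixelsPtr_[position++] = avU[j * pitchU + i];
        pixelsPtr_[position++] = avV[j * pitchV + i];
    }
}
This is how I'm creating the Texture2D with the data I just got: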
// Create texture
D3D11_TEXTURE2D_DESC desc;
desc.Width = width;
desc.Height = height;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_NV12;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA initData;
initData.pSysMem = pixelsPtr_;
initData.SysMemPitch = rowPitch;
initData.SysMemSlicePitch = 0; // unused for 2D textures, but don't leave it uninitialized
ID3D11Texture2D* tex = nullptr;
hr = d3dDevice->CreateTexture2D(&desc, &initData, &tex);
if (SUCCEEDED(hr) && tex != 0)
{
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc;
memset(&SRVDesc, 0, sizeof(SRVDesc));
SRVDesc.Format = DXGI_FORMAT_R8_UNORM;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
SRVDesc.Texture2D.MipLevels = 1;
hr = d3dDevice->CreateShaderResourceView(tex, &SRVDesc, &textureViewYUV[0]);
if (FAILED(hr))
{
tex->Release();
return hr;
}
SRVDesc.Format = DXGI_FORMAT_R8G8_UNORM;
hr = d3dDevice->CreateShaderResourceView(tex, &SRVDesc, &textureViewYUV[1]);
if (FAILED(hr))
{
tex->Release();
return hr;
}
tex->Release();
}

Then I pass both shader resource views to the pixel shader:
graphics->Context()->PSSetShaderResources(0, 2, textureViewYUV);
This is the pixel shader:
struct PixelShaderInput
{
float4 pos : SV_POSITION;
float4 Color : COLOR;
float2 texCoord : TEXCOORD;
};
static const float3x3 YUVtoRGBCoeffMatrix =
{
1.164383f, 1.164383f, 1.164383f,
0.000000f, -0.391762f, 2.017232f,
1.596027f, -0.812968f, 0.000000f
};
Texture2D<float> luminanceChannel;
Texture2D<float2> chrominanceChannel;
SamplerState linearfilter
{
Filter = MIN_MAG_MIP_LINEAR;
};
float3 ConvertYUVtoRGB(float3 yuv)
{
// Derived from https://msdn.microsoft.com/en-us/library/windows/desktop/dd206750(v=vs.85).aspx
// Section: Converting 8-bit YUV to RGB888
// These values are calculated from (16 / 255) and (128 / 255)
yuv -= float3(0.062745f, 0.501960f, 0.501960f);
yuv = mul(yuv, YUVtoRGBCoeffMatrix);
return saturate(yuv);
}
float4 main(PixelShaderInput input) : SV_TARGET
{
float y = luminanceChannel.Sample(linearfilter, input.texCoord);
float2 uv = chrominanceChannel.Sample(linearfilter, input.texCoord);
float3 YUV = float3(y, uv.x, uv.y);
float4 YUV4 = float4(YUV.x, YUV.y, YUV.z, 1);
float3 RGB = ConvertYUVtoRGB(YUV);
float4 RGB4 = float4(RGB.x, RGB.y, RGB.z, 1);
return RGB4;
}
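For reference, mul(yuv, M) in HLSL treats yuv as a row vector, so each column of the matrix above produces one output channel. Written out with the shader's constants, this is the BT.601 video-range conversion described on the MSDN page cited in the comment:
R = 1.164383 (Y - 16/255) + 1.596027 (V - 128/255)
G = 1.164383 (Y - 16/255) - 0.391762 (U - 128/255) - 0.812968 (V - 128/255)
B = 1.164383 (Y - 16/255) + 2.017232 (U - 128/255)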
Can someone help me? What am I doing wrong?
EDIT #1
int k, skipLineArea = 0;
int uvCount = (height >> 1) * (width >> 1);
// Interleave one U byte and one V byte per chroma sample; k walks the source
// planes and skips row padding (this assumes pitchU == pitchV).
for (j = 0, k = 0; j < uvCount; j++, k++)
{
if (skipLineArea == (width >> 1))
{
k += pitchU - (width >> 1);
skipLineArea = 0;
}
pixelsPtr_[position++] = avU[k];
pixelsPtr_[position++] = avV[k];
skipLineArea++;
}

EDIT #2
Updating the texture instead of creating new ones
D3D11_MAPPED_SUBRESOURCE mappedResource;
d3dContext->Map(tex, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
uint8_t* mappedData = reinterpret_cast<uint8_t*>(mappedResource.pData);

// Copy the Y rows plus the interleaved chroma rows (height * 3/2 rows in
// total, assuming an even height as NV12 requires).
for (UINT i = 0; i < height * 3 / 2; ++i)
{
memcpy(mappedData, frameData, rowPitch);
mappedData += mappedResource.RowPitch;
frameData += rowPitch;
}
d3dContext->Unmap(tex, 0);
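One thing to note about this update path: Map with D3D11_MAP_WRITE_DISCARD only succeeds on a resource created for CPU write access, so the texture description shown earlier would need its usage fields changed; a minimal sketch, assuming the same desc is reused when creating tex:
desc.Usage = D3D11_USAGE_DYNAMIC;             // CPU-writable, GPU-readable
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // required for Map(WRITE_DISCARD)
-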
Add Windows resource file support for shared libraries
5 December 2013, by James Almer
Originally written by James Almer <jamrial@gmail.com>
With the following contributions by Timothy Gu <timothygu99@gmail.com>
* Use descriptions of libraries from the pkg-config file generation function
* Use "FFmpeg Project" as CompanyName (suggested by Alexander Strasser)
* Use "FFmpeg" for ProductName as MSDN says "name of the product with which the
file is distributed" [1].
* Use FFmpeg’s version (N-xxxxx-gxxxxxxx) for ProductVersion per MSDN [1].
* Only build the .rc files when --enable-small is not enabled.

[1] http://msdn.microsoft.com/en-us/library/windows/desktop/aa381058.aspx
Signed-off-by: James Almer <jamrial@gmail.com>
Signed-off-by: Michael Niedermayer <michaelni@gmx.at>

- [DH] Changelog
- [DH] Makefile
- [DH] common.mak
- [DH] configure
- [DH] libavcodec/Makefile
- [DH] libavcodec/avcodecres.rc
- [DH] libavdevice/Makefile
- [DH] libavdevice/avdeviceres.rc
- [DH] libavfilter/Makefile
- [DH] libavfilter/avfilterres.rc
- [DH] libavformat/Makefile
- [DH] libavformat/avformatres.rc
- [DH] libavresample/Makefile
- [DH] libavresample/avresampleres.rc
- [DH] libavutil/Makefile
- [DH] libavutil/avutilres.rc
- [DH] libpostproc/Makefile
- [DH] libpostproc/postprocres.rc
- [DH] library.mak
- [DH] libswresample/Makefile
- [DH] libswresample/swresampleres.rc
- [DH] libswscale/Makefile
- [DH] libswscale/swscaleres.rc
-
ffmpeg shows wrong width/height of video
6 May 2020, by boygiandi. I have this video: https://media.gostream.co/uploads/gostream/9wkBeGM7lOfxT902V86hzI22Baj2/23-4-2020/videos/263a34c5a2fe61b33fe17e090893c04e-1587640618504_fs.mp4

When I play it in Google Chrome, it's a vertical video. But when I check it with ffmpeg:
ffmpeg -i "https://media.gostream.co/uploads/gostream/9wkBeGM7lOfxT902V86hzI22Baj2/23-4-2020/videos/263a34c5a2fe61b33fe17e090893c04e-1587640618504_fs.mp4"

It shows the video dimensions as 1080x1080:
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'a.mp4':
 Metadata:
 major_brand : isom
 minor_version : 512
 compatible_brands: isomiso2avc1mp41
 encoder : Lavf58.35.101
 Duration: 00:00:39.51, start: 0.000000, bitrate: 1577 kb/s
 Stream #0:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p, 1080x1080 [SAR 9:16 DAR 9:16], 1464 kb/s, 23.98 fps, 23.98 tbr, 24k tbn, 47.95 tbc (default)
 Metadata:
 handler_name : VideoHandler
 Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 128 kb/s (default)
 Metadata:
 handler_name : SoundHandler
At least one output file must be specified

And when I livestream this video to Facebook, it scales the vertical video into a square: https://imgur.com/a/A8dQ7j7

How can I correct the video size when livestreaming?
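The ffmpeg output above explains the mismatch: the frames are stored at 1080x1080, but the stream carries a sample aspect ratio of 9:16, so aspect-aware players such as Chrome display it at roughly 608x1080, while pipelines that ignore the SAR treat it as square. One way to avoid this, sketched here with placeholder file names, is to resample to square pixels and reset the SAR before streaming:

ffmpeg -i in.mp4 -vf "scale=trunc(iw*sar/2)*2:ih,setsar=1" -c:a copy out.mp4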