
Other articles (73)
-
Contribute to documentation
13 April 2011
Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation by users as well as developers, including: critiques of existing features and functions; articles contributed by developers, administrators, content producers and editors; screenshots to illustrate the above; and translations of existing documentation into other languages.
To contribute, register to the project users’ mailing (...)
-
MediaSPIP in private mode (Intranet)
17 September 2013
Starting with version 0.3, a MediaSPIP channel can be made private, blocked to anyone not logged in, thanks to the "Intranet/extranet" plugin.
When activated, the Intranet/extranet plugin blocks access to the channel for any unidentified visitor, preventing access to the content by systematically redirecting them to the login form.
This system can be particularly useful for certain uses, such as: a workshop with children whose content must not (...)
-
Multilang: improving the interface for multilingual blocks
18 February 2011
Multilang is an additional plugin that is not activated by default when MediaSPIP is initialised.
Once activated, a preconfiguration is put in place automatically by MediaSPIP init, making the new feature immediately operational. No separate configuration step is therefore required.
On other sites (9697)
-
NV12 textures not working in DirectX 11.1
28 March 2017, by André Vitor
I'm trying to render NV12 textures from frames decoded with ffmpeg 2.8.11 using DirectX 11.1, but when I render them the texture is broken and the colors are always off.
The result is: http://imgur.com/a/YIVQk
The code below shows how I take a frame decoded by ffmpeg in YUV420P format and then convert it (I'm not sure correctly) to NV12 by interleaving the U and V planes.
static uint8_t *pixelsPtr_ = nullptr;
// NV12 layout: a full-height Y plane followed by a half-height plane of interleaved UV pairs;
// rowPitch rounds the width up to an even number of bytes
UINT rowPitch = ((width + 1) >> 1) * 2;
UINT imageSize = (rowPitch * height) + ((rowPitch * height + 1) >> 1);
if (!pixelsPtr_)
{
pixelsPtr_ = new uint8_t[imageSize];
}
int j, position = 0;
uint32_t pitchY = avFrame.linesize[0];
uint32_t pitchU = avFrame.linesize[1];
uint32_t pitchV = avFrame.linesize[2];
uint8_t *avY = avFrame.data[0];
uint8_t *avU = avFrame.data[1];
uint8_t *avV = avFrame.data[2];
::SecureZeroMemory(pixelsPtr_, imageSize);
// Copy the Y plane row by row, dropping ffmpeg's per-row padding (pitchY can exceed width)
for (j = 0; j < height; j++)
{
::CopyMemory(pixelsPtr_ + position, avY, (width));
position += (width);
avY += pitchY;
}
// Copies a half-width U row, then a half-width V row, per pass: the chroma ends up
// row-interleaved rather than byte-interleaved as NV12 expects
for (j = 0; j < height >> 1; j++)
{
::CopyMemory(pixelsPtr_ + position, avU, (width >> 1));
position += (width >> 1);
avU += pitchU;
::CopyMemory(pixelsPtr_ + position, avV, (width >> 1));
position += (width >> 1);
avV += pitchV;
}This is how I’m creating the Texture2D with the data I just got.
// Create texture
D3D11_TEXTURE2D_DESC desc;
desc.Width = width;
desc.Height = height;
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Format = DXGI_FORMAT_NV12;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA initData = {}; // zero-init so SysMemSlicePitch isn't left undefined
initData.pSysMem = pixelsPtr_;
initData.SysMemPitch = rowPitch;
ID3D11Texture2D* tex = nullptr;
hr = d3dDevice->CreateTexture2D(&desc, &initData, &tex);
if (SUCCEEDED(hr) && tex != 0)
{
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc;
memset(&SRVDesc, 0, sizeof(SRVDesc));
SRVDesc.Format = DXGI_FORMAT_R8_UNORM;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
SRVDesc.Texture2D.MipLevels = 1;
hr = d3dDevice->CreateShaderResourceView(tex, &SRVDesc, &textureViewYUV[0]);
if (FAILED(hr))
{
tex->Release();
return hr;
}
SRVDesc.Format = DXGI_FORMAT_R8G8_UNORM;
hr = d3dDevice->CreateShaderResourceView(tex, &SRVDesc, &textureViewYUV[1]);
if (FAILED(hr))
{
tex->Release();
return hr;
}
tex->Release();
}

Then I pass both shader resource views to the pixel shader:
graphics->Context()->PSSetShaderResources(0, 2, textureViewYUV);
This is the pixel shader:
struct PixelShaderInput
{
float4 pos : SV_POSITION;
float4 Color : COLOR;
float2 texCoord : TEXCOORD;
};
static const float3x3 YUVtoRGBCoeffMatrix =
{
1.164383f, 1.164383f, 1.164383f,
0.000000f, -0.391762f, 2.017232f,
1.596027f, -0.812968f, 0.000000f
};
Texture2D<float> luminanceChannel;
Texture2D<float2> chrominanceChannel;
SamplerState linearfilter
{
Filter = MIN_MAG_MIP_LINEAR;
};
float3 ConvertYUVtoRGB(float3 yuv)
{
// Derived from https://msdn.microsoft.com/en-us/library/windows/desktop/dd206750(v=vs.85).aspx
// Section: Converting 8-bit YUV to RGB888
// These values are calculated from (16 / 255) and (128 / 255)
yuv -= float3(0.062745f, 0.501960f, 0.501960f);
yuv = mul(yuv, YUVtoRGBCoeffMatrix);
return saturate(yuv);
}
float4 main(PixelShaderInput input) : SV_TARGET
{
float y = luminanceChannel.Sample(linearfilter, input.texCoord);
float2 uv = chrominanceChannel.Sample(linearfilter, input.texCoord);
float3 YUV = float3(y, uv.x, uv.y);
float4 YUV4 = float4(YUV.x, YUV.y, YUV.z, 1);
float3 RGB = ConvertYUVtoRGB(YUV);
float4 RGB4 = float4(RGB.x, RGB.y, RGB.z, 1);
return RGB4;
}
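For reference, the matrix above is the standard BT.601 studio-swing conversion, i.e. R = 1.164 (Y - 16/255) + 1.596 (V - 128/255), G = 1.164 (Y - 16/255) - 0.392 (U - 128/255) - 0.813 (V - 128/255), B = 1.164 (Y - 16/255) + 2.017 (U - 128/255), so the coefficients themselves look correct.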
Can someone help me? What am I doing wrong?
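One more thing worth checking, as a hedged aside: a SamplerState block declared inside HLSL source is only honoured by the effects framework; compiled as a plain pixel shader, the Filter assignment above is ignored, and whatever sampler is bound from the application side (or D3D11's default sampler, which is also linear) is what actually gets used. A minimal sketch of binding one explicitly, assuming the same d3dDevice plus a d3dContext variable (names taken from the snippets above):

// Create and bind a linear-filtering sampler from the application side
D3D11_SAMPLER_DESC sampDesc = {};
sampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
sampDesc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
sampDesc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
sampDesc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
sampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
sampDesc.MaxLOD = D3D11_FLOAT32_MAX;

ID3D11SamplerState* samplerState = nullptr;
if (SUCCEEDED(d3dDevice->CreateSamplerState(&sampDesc, &samplerState)))
{
    d3dContext->PSSetSamplers(0, 1, &samplerState);
    samplerState->Release(); // the context keeps its own reference
}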
EDIT #1
Interleaving the U and V bytes instead:
int k;
int skipLineArea = 0;
int uvCount = (height >> 1) * (width >> 1);
// Walk both chroma planes and write the samples byte-interleaved (U0 V0 U1 V1 ...),
// skipping ffmpeg's per-row padding; note this assumes pitchU == pitchV
for (j = 0, k = 0; j < uvCount; j++, k++)
{
if (skipLineArea == (width >> 1))
{
k += pitchU - (width >> 1);
skipLineArea = 0;
}
pixelsPtr_[position++] = avU[k];
pixelsPtr_[position++] = avV[k];
skipLineArea++;
}

EDIT #2
Updating the texture instead of creating new ones:
D3D11_MAPPED_SUBRESOURCE mappedResource;
d3dContext->Map(tex, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
uint8_t* mappedData = reinterpret_cast<uint8_t*>(mappedResource.pData);
for (UINT i = 0; i < height * 1.5; ++i) // NV12 is height * 3/2 rows tall
{
memcpy(mappedData, frameData, rowPitch);
mappedData += mappedResource.RowPitch;
frameData += rowPitch;
}
d3dContext->Unmap(tex, 0);
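A note on this last snippet: Map with D3D11_MAP_WRITE_DISCARD only succeeds on resources created with D3D11_USAGE_DYNAMIC and CPU write access, while the texture description earlier in the question uses D3D11_USAGE_DEFAULT, so this Map call would fail (its HRESULT is worth checking). A minimal sketch of the two fields that would have to change in the earlier D3D11_TEXTURE2D_DESC for the mapping path to work:

// For the Map/WRITE_DISCARD update path the texture must be dynamic
desc.Usage = D3D11_USAGE_DYNAMIC;             // was D3D11_USAGE_DEFAULT
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // was 0

Alternatively, keeping D3D11_USAGE_DEFAULT and updating with d3dContext->UpdateSubresource(...) avoids the Map call entirely.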
-
FFMPEG in Android doesn't create the output file
17 September 2018, by Wongeun Cho
I am currently using FFMPEG in my Android project to convert a video file to an audio file.
When I run the conversion through the FFMPEG library, no error occurs; however, the output file is never created in the folder I specified.
Here is my code for generating the audio file:
OnConvertButtonClickListener convertButtonClickListener = new OnConvertButtonClickListener() {
@Override
public void onClick(int position) {
Converter.loadFFMpegBinary();
String cmd = CMD_HEAD + videoItems.get(position).getTitle() + CMD_STRICT;
String[] fileDir = videoItems.get(position).getTitle().split(File.separator);
String fileName = fileDir[fileDir.length-1];
String out_audio_file = FileManager.getHomeDir()+ File.separator+ fileName.substring(0, fileName.length()-3)+"aac";
Log.d("tag1", out_audio_file);
cmd = cmd+out_audio_file;
Log.e("tag1", cmd);
String[] command = cmd.split(" ");
Converter.execFFmpegBinary(command);
}
};

This is the execFFmpegBinary method; after executing it, "success" is displayed in my Log window:
public static void execFFmpegBinary(final String[] command) {
try {
ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String s) {
Log.d("execFFmpegBinary", "fail");
}
@Override
public void onSuccess(String s) {
Log.d("execFFmpegBinary", "success");
}
@Override
public void onProgress(String s) {
Log.d("execFFmpegBinary", "progress");
}
@Override
public void onStart() {
Log.d("execFFmpegBinary", "start");
}
@Override
public void onFinish() {
Log.d("execFFmpegBinary", "finish");
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
// do nothing for now
Log.d("execFFmpegBinary", "Exception");
}
}

Below is an example of my cmd:
-version -y -i /storage/emulated/0/DCIM/Camera/20180104_031417.mp4 -f aac -ab 192000 -vn /storage/emulated/0/Memento/20180104_031417.aac
Does anyone know why my output file isn't created?
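One hedged guess, going only by the example line above: ffmpeg treats -version as a print-and-exit option, so if CMD_HEAD really expands to that leading -version, ffmpeg prints its version banner and exits successfully without transcoding anything, which would match the "success with no output file" symptom. A sketch of the same command without it (also dropping -f aac, since the muxer that writes raw .aac files is named adts and the format can simply be inferred from the output extension):
-y -i /storage/emulated/0/DCIM/Camera/20180104_031417.mp4 -vn -ab 192000 /storage/emulated/0/Memento/20180104_031417.aac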
-
How to use a loop to play a specific sequence of files
24 July 2014, by Andy
Files are played with the following command:
root/bin/ffmpeg -re -i $(ls /usr/btv/studio1/*.mp4 | shuf -n 1) -vcodec copy -preset superfast -acodec copy -ar 44100 -ab 32k -f flv rtmp://ip:1935/live/studio1
This picks a random file from /usr/btv/studio1/ and streams it.
Within the same directory, there is a commercial.mp4 file.
How would I use a loop in this case to play the commercial after every other file, with a 5-second wait between the switches?
As an example:
root/bin/ffmpeg -re -i $(ls /usr/btv/studio1/*.mp4 | shuf -n 1) -vcodec copy -preset superfast -acodec copy -ar 44100 -ab 32k -f flv rtmp://ip:1935/live/studio1
Play: JohnWedding.mp4
File ends (+5 seconds)
Play: commercial.mp4
File ends (+5 seconds)
Play: Flowers.mp4
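A minimal sketch of one way to get that sequence with a shell loop, assuming commercial.mp4 should be excluded from the random pick and reusing the paths and RTMP URL from above:

#!/bin/bash
# Alternate a random video with commercial.mp4, waiting 5 seconds between switches
while true; do
    file=$(ls /usr/btv/studio1/*.mp4 | grep -v 'commercial.mp4' | shuf -n 1)
    root/bin/ffmpeg -re -i "$file" -vcodec copy -preset superfast -acodec copy \
        -ar 44100 -ab 32k -f flv rtmp://ip:1935/live/studio1
    sleep 5
    root/bin/ffmpeg -re -i /usr/btv/studio1/commercial.mp4 -vcodec copy -preset superfast \
        -acodec copy -ar 44100 -ab 32k -f flv rtmp://ip:1935/live/studio1
    sleep 5
done

Each ffmpeg invocation blocks until its file finishes streaming, so the sleep 5 lines provide the pause between the switches.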