
Medias (91)
-
Richard Stallman and free software
19 October 2011, by
Updated: May 2013
Language: French
Type: Text
-
Stereo master soundtrack
17 October 2011, by
Updated: October 2011
Language: English
Type: Audio
-
Elephants Dream - Cover of the soundtrack
17 October 2011, by
Updated: October 2011
Language: English
Type: Picture
-
#7 Ambience
16 October 2011, by
Updated: June 2015
Language: English
Type: Audio
-
#6 Teaser Music
16 October 2011, by
Updated: February 2013
Language: English
Type: Audio
-
#5 End Title
16 October 2011, by
Updated: February 2013
Language: English
Type: Audio
Other articles (103)
-
MediaSPIP 0.1 Beta version
25 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP deemed "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to carry out other manual (...)
-
MediaSPIP version 0.1 Beta
16 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP deemed "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for an installation in "farm" mode, you will also need to make other modifications (...)
-
Improving the base version
13 September 2013
Nicer multiple selection
The Chosen plugin improves the ergonomics of multiple-selection fields. See the following two images to compare.
To use it, enable the Chosen plugin (general site configuration > plugin management), then configure it (Templates > Chosen) by enabling Chosen on the public site and specifying the form elements to enhance, for example select[multiple] for multiple-selection lists (...)
On other websites (10378)
-
Delphi, TBitmap (rgb) to YCbCr colors format
18 October 2019, by Alexander M.
I have a video encoding example from http://www.delphiffmpeg.com and need to convert a set of TBitmaps to YCbCr (YUV). How should we do it? The example contains dummy colors:
(* encode 1 second of video *)
idx := 1;
for i := 0 to 25 - 1 do
begin
av_init_packet(@pkt);
pkt.data := nil; // packet data will be allocated by the encoder
pkt.size := 0;
//fflush(stdout);
(* prepare a dummy image *)
(* Y *)
for y := 0 to c.height - 1 do
for x := 0 to c.width - 1 do
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := x + y + i * 3;
(* Cb and Cr *)
for y := 0 to c.height div 2 - 1 do
for x := 0 to c.width div 2 - 1 do
begin
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := 128 + y + i * 2;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := 64 + x + i * 5;
end;
frame.pts := i;
(* encode the image *)
ret := avcodec_encode_video2(c, @pkt, frame, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
end;
But we need to convert bitmaps to YCbCr instead of filling the pixels with a dummy image. Here is the full source code:
(*
* Video encoding example
*)
procedure video_encode_example(const filename: string; codec_id: TAVCodecID);
const
endcode: array[0..3] of Byte = ( 0, 0, 1, $b7 );
var
codec: PAVCodec;
c: PAVCodecContext;
idx, i, ret, x, y, got_output: Integer;
f: THandle;
frame: PAVFrame;
pkt: TAVPacket;
begin
Writeln(Format('Encode video file %s', [filename]));
(* find the mpeg1 video encoder *)
codec := avcodec_find_encoder(codec_id);
if not Assigned(codec) then
begin
Writeln(ErrOutput, 'Codec not found');
ExitCode := 1;
Exit;
end;
c := avcodec_alloc_context3(codec);
if not Assigned(c) then
begin
Writeln(ErrOutput, 'Could not allocate video codec context');
ExitCode := 1;
Exit;
end;
(* put sample parameters *)
c.bit_rate := 400000;
(* resolution must be a multiple of two *)
c.width := 352;
c.height := 288;
(* frames per second *)
c.time_base.num := 1;
c.time_base.den := 25;
(* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*)
c.gop_size := 10;
c.max_b_frames := 1;
c.pix_fmt := AV_PIX_FMT_YUV420P;
if codec_id = AV_CODEC_ID_H264 then
av_opt_set(c.priv_data, 'preset', 'slow', 0);
(* open it *)
if avcodec_open2(c, codec, nil) < 0 then
begin
Writeln(ErrOutput, 'Could not open codec');
ExitCode := 1;
Exit;
end;
f := FileCreate(filename);
if f = INVALID_HANDLE_VALUE then
begin
Writeln(ErrOutput, Format('Could not open %s', [filename]));
ExitCode := 1;
Exit;
end;
frame := av_frame_alloc();
if not Assigned(frame) then
begin
Writeln(ErrOutput, 'Could not allocate video frame');
ExitCode := 1;
Exit;
end;
frame.format := Ord(c.pix_fmt);
frame.width := c.width;
frame.height := c.height;
(* the image can be allocated by any means and av_image_alloc() is
* just the most convenient way if av_malloc() is to be used *)
ret := av_image_alloc(@frame.data[0], @frame.linesize[0], c.width, c.height,
c.pix_fmt, 32);
if ret < 0 then
begin
Writeln(ErrOutput, 'Could not allocate raw picture buffer');
ExitCode := 1;
Exit;
end;
(* encode 1 second of video *)
idx := 1;
for i := 0 to 25 - 1 do
begin
av_init_packet(@pkt);
pkt.data := nil; // packet data will be allocated by the encoder
pkt.size := 0;
//fflush(stdout);
(* prepare a dummy image *)
(* Y *)
for y := 0 to c.height - 1 do
for x := 0 to c.width - 1 do
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := x + y + i * 3;
(* Cb and Cr *)
for y := 0 to c.height div 2 - 1 do
for x := 0 to c.width div 2 - 1 do
begin
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := 128 + y + i * 2;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := 64 + x + i * 5;
end;
frame.pts := i;
(* encode the image *)
ret := avcodec_encode_video2(c, @pkt, frame, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
end;
(* get the delayed frames *)
repeat
//fflush(stdout);
ret := avcodec_encode_video2(c, @pkt, nil, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
until got_output = 0;
(* add sequence end code to have a real mpeg file *)
FileWrite(f, endcode[0], SizeOf(endcode));
FileClose(f);
avcodec_close(c);
av_free(c);
av_freep(@frame.data[0]);
av_frame_free(@frame);
Writeln('');
end;
Yes, we know this formula, but what should we do with the (* Cb and Cr *) loop that goes up to c.height div 2 - 1 and c.width div 2 - 1? All our experiments produce correct image geometry but incorrect colors... Here is what we have:
(* Y *)
for y := 0 to c.height - 1 do
begin
Line := image.ScanLine[y];
for x := 0 to c.width - 1 do
begin
Yy := Round(Line[x].R*0.29900 + Line[x].G*0.58700 + Line[x].B*0.11400);
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := Yy;
end;
end;
(* Cb and Cr *)
for y := 0 to c.height div 2 - 1 do
begin
Pixels := image.ScanLine[y];
for x := 0 to c.width div 2 - 1 do
begin
Cb := Round(Line[x].R * -0.16874 - Line[x].G * 0.33126 + Line[x].B * 0.50000) + 128;
Cr := Round(Line[x].R * 0.50000 - Line[x].G * 0.41869 - Line[x].B * 0.08131) + 64;
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := Cr;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := Cb;
//PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := 128 + y + i * 2;
//PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := 64 + x + i * 5;
end;
end;
How should this be fixed?
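For reference, here is a minimal sketch in C of the missing subsampling step (the helper name rgb24_to_yuv420p and the packed-RGB source buffer with its stride are assumptions for illustration, not delphiffmpeg types). It uses the same BT.601/JPEG coefficients as above; for an AV_PIX_FMT_YUV420P frame, data[1] holds Cb and data[2] holds Cr, each chroma plane is half the width and half the height, one chroma sample covers a 2x2 block of source pixels, and both chroma offsets are +128:
/* Minimal sketch: fill an FFmpeg-style YUV420P frame from a packed 24-bit RGB buffer.
 * BT.601/JPEG full-range coefficients, as in the formulas above.
 * "rgb" and "stride" describe a hypothetical packed R,G,B source; width and height must be even. */
#include <stdint.h>
#include <math.h>

static uint8_t clamp8(double v)
{
    if (v < 0) return 0;
    if (v > 255) return 255;
    return (uint8_t)lrint(v);
}

void rgb24_to_yuv420p(const uint8_t *rgb, int stride, int width, int height,
                      uint8_t *dst[3], const int linesize[3])
{
    /* Y: one sample per source pixel */
    for (int y = 0; y < height; y++) {
        const uint8_t *row = rgb + y * stride;
        for (int x = 0; x < width; x++) {
            double R = row[3 * x], G = row[3 * x + 1], B = row[3 * x + 2];
            dst[0][y * linesize[0] + x] = clamp8(0.299 * R + 0.587 * G + 0.114 * B);
        }
    }
    /* Cb/Cr: one sample per 2x2 block, so the source is read at (2*y, 2*x)
     * and the four pixels are averaged before applying the formulas */
    for (int y = 0; y < height / 2; y++) {
        for (int x = 0; x < width / 2; x++) {
            double R = 0, G = 0, B = 0;
            for (int dy = 0; dy < 2; dy++) {
                for (int dx = 0; dx < 2; dx++) {
                    const uint8_t *p = rgb + (2 * y + dy) * stride + 3 * (2 * x + dx);
                    R += p[0]; G += p[1]; B += p[2];
                }
            }
            R /= 4; G /= 4; B /= 4;
            dst[1][y * linesize[1] + x] = clamp8(-0.16874 * R - 0.33126 * G + 0.5 * B + 128); /* Cb */
            dst[2][y * linesize[2] + x] = clamp8( 0.5 * R - 0.41869 * G - 0.08131 * B + 128); /* Cr */
        }
    }
}
Ported to the Delphi loop above, the key changes would be indexing the source bitmap at 2*y and 2*x (or averaging the 2x2 block) inside the half-size loop, writing Cb into frame.data[1] and Cr into frame.data[2], and using +128 for both chroma offsets.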
-
duration change after transcode ts
25 December 2017, by Feilong Luo
I have a problem with transcoding using ffmpeg.
I want to convert an m3u8 to mp4, so I transcode every ts file first and then concat them into an mp4, but I found that the duration becomes bigger than the source file.
The source file is:
http://oc7iy3eta.bkt.clouddn.com/src_20.ts
After transcoding, the test file is:
http://oc7iy3eta.bkt.clouddn.com/test_20.ts
I use the command below to change to 5 fps and 400k bitrate:
sudo ffmpeg -analyzeduration 2147483647 -probesize 2147483647 -nostdin -y -v warning -i ./src_20.ts -threads 3 -movflags faststart -metadata:s:v rotate=0 -chunk_duration 520000 -video_track_timescale 25000 -pix_fmt yuv420p -copytb 1 -vcodec libx264 -b:v 400000 -minrate 400000 -maxrate 400000 -bufsize 500k -force_key_frames "expr:gte(t,n_forced*2)" -vsync 1 -r 5 -s 544*960 -acodec libfaac -async 1 ./test_20.ts
I use the ffprobe command to see the video info:
Source file info:
Duration: 00:00:01.26, start: 28.346989, bitrate: 921 kb/s
Program 1
Metadata:
service_name : Service01
service_provider: FFmpeg
Stream #0:0[0x100]: Audio: aac ([15][0][0][0] / 0x000F), 44100 Hz, stereo, fltp, 23 kb/s
Stream #0:1[0x101]: Video: h264 (High) ([27][0][0][0] / 0x001B), yuv420p, 544x960, 10.67 tbr, 90k tbn, 180k tbc
Test file:
Input #0, mpegts, from 'test_20.ts':
Duration: 00:00:01.62, start: 1.576778, bitrate: 447 kb/s
Program 1
Metadata:
service_name : Service01
service_provider: FFmpeg
Stream #0:0[0x100]: Video: h264 (High) ([27][0][0][0] / 0x001B), yuv420p, 544x960, 5 fps, 5 tbr, 90k tbn, 10 tbc
Stream #0:1[0x101]: Audio: aac ([15][0][0][0] / 0x000F), 44100 Hz, stereo, fltp, 5 kb/s
=======================================================================
Question
So, we can see that the duration of the src file is 1.26 s, but after transcoding the test file is 1.62 s.
Why? Can anybody help?
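One way to narrow down where the extra 0.36 s comes from is to compare the container and per-stream durations of src_20.ts and test_20.ts programmatically rather than reading only the ffprobe summary. A minimal libavformat sketch in C (error handling kept to a bare minimum; FFmpeg builds older than 4.0 would also need av_register_all() first):
/* Print container and per-stream durations, e.g. for src_20.ts vs test_20.ts. */
#include <stdio.h>
#include <libavformat/avformat.h>

int main(int argc, char **argv)
{
    if (argc < 2) {
        fprintf(stderr, "usage: %s <file>\n", argv[0]);
        return 1;
    }
    AVFormatContext *fmt = NULL;
    if (avformat_open_input(&fmt, argv[1], NULL, NULL) < 0 ||
        avformat_find_stream_info(fmt, NULL) < 0) {
        fprintf(stderr, "could not open %s\n", argv[1]);
        return 1;
    }
    printf("container duration: %.3f s\n", fmt->duration / (double)AV_TIME_BASE);
    for (unsigned i = 0; i < fmt->nb_streams; i++) {
        AVStream *st = fmt->streams[i];
        /* st->duration may be AV_NOPTS_VALUE for some MPEG-TS inputs */
        printf("stream %u (%s): %.3f s\n", i,
               av_get_media_type_string(st->codecpar->codec_type),
               st->duration == AV_NOPTS_VALUE ? 0.0 : st->duration * av_q2d(st->time_base));
    }
    avformat_close_input(&fmt);
    return 0;
}
Comparing the audio and video stream durations separately can show whether the growth comes from the -r 5 / -vsync 1 frame-rate conversion on the video side or from -async 1 handling on the audio side.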
-
Encoding RGB frames using x264 and AVCodec in C
6 November 2016, by deepwork
I have RGB24 frames streamed from a camera and I want to encode them into h264. I found that AVCodec and x264 can do this; the problem is that x264 by default accepts YUV420 as input, so I wrote a program which converts the RGB frames to YUV420 using the sws_scale function. This works well, except that it does not reach the required FPS because the conversion (RGB->YUV420) takes time.
This is how I set up my encoder context:
videoStream->id = 0;
vCodecCtx = videoStream->codec;
vCodecCtx->coder_type = AVMEDIA_TYPE_VIDEO;
vCodecCtx->codec_id = AV_CODEC_ID_H264;
vCodecCtx->bit_rate = 400000;
vCodecCtx->width = Width;
vCodecCtx->height = Height;
vCodecCtx->time_base.den = FPS;
vCodecCtx->time_base.num = 1;
//vCodecCtx->time_base = (AVRational){1,};
vCodecCtx->gop_size = 12;
vCodecCtx->max_b_frames = 1;
vCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
if(formatCtx->oformat->flags & AVFMT_GLOBALHEADER)
vCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
av_opt_set(vCodecCtx->priv_data, "preset", "ultrafast", 0);
av_opt_set(vCodecCtx->priv_data, "profile", "baseline", AV_OPT_SEARCH_CHILDREN);
if (avcodec_open2(vCodecCtx, h264Codec, NULL) < 0){
return 0;
}
When I change AV_PIX_FMT_YUV420P to AV_PIX_FMT_RGB24, avcodec_open2 fails.
I read that there is a version of libx264 for RGB called libx264rgb, but I don't know whether I have to rebuild x264 with this option enabled, download another source, or do it programmatically with the existing x264 lib. The question is how to enable RGB as input to libx264 for use with libavcodec in C, or how to make the encoding or sws_scale faster.
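For the RGB-input part, FFmpeg exposes x264's native RGB path as a separate encoder named libx264rgb, which accepts packed RGB directly and skips the sws_scale pass. A minimal sketch, assuming an FFmpeg build with libx264 enabled (the helper name and the chosen parameters are illustrative, and the resulting High 4:4:4 stream is not playable on every hardware decoder):
/* Sketch: open an H.264 encoder that takes packed RGB24 frames directly,
 * via FFmpeg's "libx264rgb" wrapper, instead of converting to YUV420P first. */
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

AVCodecContext *open_rgb_encoder(int width, int height, int fps)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("libx264rgb");
    if (!codec)
        return NULL; /* this FFmpeg build has no RGB x264 wrapper */

    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return NULL;

    ctx->width        = width;
    ctx->height       = height;
    ctx->time_base    = (AVRational){1, fps};
    ctx->pix_fmt      = AV_PIX_FMT_RGB24; /* libx264rgb also takes BGR24/BGR0 */
    ctx->gop_size     = 12;
    ctx->max_b_frames = 1;
    av_opt_set(ctx->priv_data, "preset", "ultrafast", 0);

    if (avcodec_open2(ctx, codec, NULL) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}
If YUV420P output is still required for compatibility, the usual alternatives are running the sws_scale conversion on a separate thread or converting directly from the camera's native pixel format rather than going through RGB24 first.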
Edit:
How I built ffmpeg:
NDK=D:/AndroidDev/android-ndk-r9
PLATFORM=$NDK/platforms/android-18/arch-arm/
PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/windows-x86_64
GENERAL="\
--enable-small \
--enable-cross-compile \
--extra-libs="-lgcc" \
--arch=arm \
--cc=$PREBUILT/bin/arm-linux-androideabi-gcc \
--cross-prefix=$PREBUILT/bin/arm-linux-androideabi- \
--nm=$PREBUILT/bin/arm-linux-androideabi-nm \
--extra-cflags="-I../x264/android/arm/include" \
--extra-ldflags="-L../x264/android/arm/lib" "
MODULES="\
--enable-gpl \
--enable-libx264"
function build_ARMv6
{
./configure \
--target-os=linux \
--prefix=./android/armeabi \
${GENERAL} \
--sysroot=$PLATFORM \
--enable-shared \
--disable-static \
--extra-cflags=" -O3 -fpic -fasm -Wno-psabi -fno-short-enums -fno-strict-aliasing -finline-limit=300 -mfloat-abi=softfp -mfpu=vfp -marm -march=armv6" \
--extra-ldflags="-lx264 -Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -nostdlib -lc -lm -ldl -llog" \
--enable-zlib \
${MODULES} \
--disable-doc \
--enable-neon
make clean
make
make install
}
build_ARMv6
echo Android ARMEABI builds finished
How I built x264:
NDK=D:/AndroidDev/android-ndk-r9
PLATFORM=$NDK/platforms/android-18/arch-arm/
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/windows-x86_64
PREFIX=./android/arm
function build_one
{
./configure \
--prefix=$PREFIX \
--enable-static \
--enable-pic \
--host=arm-linux \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--sysroot=$PLATFORM
make clean
make
make install
}
build_one
echo Android ARM builds finished