
Recherche avancée
Autres articles (44)
-
HTML5 audio and video support
13 avril 2011, par — MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
For older browsers the Flowplayer flash fallback is used.
MediaSPIP allows for media playback on major mobile platforms with the above (...) -
Support audio et vidéo HTML5
10 avril 2011 — MediaSPIP utilise les balises HTML5 video et audio pour la lecture de documents multimedia en profitant des dernières innovations du W3C supportées par les navigateurs modernes.
Pour les navigateurs plus anciens, le lecteur flash Flowplayer est utilisé.
Le lecteur HTML5 utilisé a été spécifiquement créé pour MediaSPIP : il est complètement modifiable graphiquement pour correspondre à un thème choisi.
Ces technologies permettent de distribuer vidéo et son à la fois sur des ordinateurs conventionnels (...) -
De l’upload à la vidéo finale [version standalone]
31 janvier 2010, par — Le chemin d’un document audio ou vidéo dans SPIPMotion est divisé en trois étapes distinctes.
Upload et récupération d’informations de la vidéo source
Dans un premier temps, il est nécessaire de créer un article SPIP et de lui joindre le document vidéo "source".
Au moment où ce document est joint à l’article, deux actions supplémentaires au comportement normal sont exécutées : La récupération des informations techniques des flux audio et video du fichier ; La génération d’une vignette : extraction d’une (...)
Sur d’autres sites (8094)
-
When using ffmpeg api, the encoded gif is output to the dynamically applied buffer
5 septembre 2023, par yangjinhui2936 — Purpose: I want to use ffmpeg to encode data from RGB8 to GIF format. Currently it can be encoded into file output.


But I don't know how to store the gif directly in the buffer instead of saving it in the file.


Below is the text of the code:
input_w560_h1280.rgb8


#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>

#define CONFIG_FBDEV_OUTDEV 1
#define GET_DATA_TO_BUFF 0

/*
 * Read the entire contents of `filename` into the caller-provided buffer.
 *
 * dst      : destination buffer owned by the CALLER; must be large enough to
 *            hold the whole file (this function cannot verify that).
 * filename : path of the file to read.
 * fileSize : out-parameter; receives the file size in bytes on success.
 *
 * Returns 0 on success, -1 on failure.
 *
 * BUG FIX: the original called free(buffer) on a short read, but `buffer`
 * aliased `dst`, which the caller allocates with av_malloc() and frees again
 * later -- that was a free with the wrong allocator AND a double free.
 * The caller keeps ownership of `dst` on every path now.
 * Also fixed: the FILE* was leaked when dst == NULL, and ftell() failure
 * was never checked.
 */
int readFileToBuffer(uint8_t *dst, const char* filename, long* fileSize) {
    FILE* file = fopen(filename, "rb");
    if (!file || dst == NULL) {
        fprintf(stderr, "cannot open file: %s %p\n", filename, dst);
        if (file)
            fclose(file);   /* don't leak the handle when only dst was bad */
        return -1;
    }

    /* Determine the file size by seeking to the end. */
    fseek(file, 0, SEEK_END);
    *fileSize = ftell(file);
    fseek(file, 0, SEEK_SET);
    if (*fileSize < 0) {        /* ftell() reported an error */
        fprintf(stderr, "cannot determine size of %s\n", filename);
        fclose(file);
        return -1;
    }

    /* Read the whole file in a single call. */
    if (fread(dst, 1, (size_t)*fileSize, file) != (size_t)*fileSize) {
        fprintf(stderr, "read file failed size %ld\n", *fileSize);
        fclose(file);
        return -1;              /* NOTE: dst is NOT freed -- caller owns it */
    }

    fclose(file);
    return 0;
}

/*
 * Dump `bufferSize` bytes from `buffer` into `filename` (created or
 * truncated, binary mode).  Returns 0 on success, -1 on any error.
 */
int writeBufferToFile(const char* filename, uint8_t* buffer, long bufferSize) {
    FILE *out = fopen(filename, "wb");
    if (out == NULL) {
        fprintf(stderr, "cannot open: %s\n", filename);
        return -1;
    }

    /* A single fwrite either stores every byte or we treat it as failure. */
    size_t written = fwrite(buffer, 1, bufferSize, out);
    if (written != (size_t)bufferSize) {
        fprintf(stderr, "cannot fwrite\n");
        fclose(out);
        return -1;
    }

    fclose(out);
    return 0;
}

/*
 * Encode one raw RGB8 frame into a GIF.
 *
 * With GET_DATA_TO_BUFF == 0 the muxed GIF is written to output.gif.
 * With GET_DATA_TO_BUFF == 1 the muxed GIF is produced in a dynamic memory
 * buffer (the asker's goal) and then dumped to bf_1.gif for inspection.
 *
 * BUG FIX (the question being asked): to get a *complete* GIF byte stream
 * (header + frames + trailer) into memory, the dynamic buffer must be
 * installed as the muxer's I/O context (outputFormatContext->pb) BEFORE
 * avformat_write_header().  The original code wrote the raw encoded packets
 * into a separate dyn-buf with avio_write(), which yields bare codec data
 * with no GIF container structure -- hence "no gif header" in the dump.
 *
 * BUG FIX (cleanup): the context comes from avformat_alloc_output_context2,
 * so it must be released with avformat_free_context();
 * avformat_close_input() is only for demuxer (input) contexts.
 */
int main() {
    const char *output_filename = "output.gif";
    const char *input_filename = "input_w560_h1280.rgb8";
    int width = 560;
    int height = 1280;

    /* Legacy global registration required by the old FFmpeg API level this
     * program targets (these calls were removed in FFmpeg >= 4.0/5.0). */
    avcodec_register_all();
    avdevice_register_all();
    av_register_all();

    av_log_set_level(AV_LOG_MAX_OFFSET);

    /* ---- GIF encoder ---------------------------------------------------- */
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_GIF);
    if (!codec) {
        printf("GIF encoder not found.\n");
        return 1;
    }

    AVCodecContext *codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        printf("Could not allocate codec codecContext.\n");
        return 1;
    }

    codecContext->width = width;
    codecContext->height = height;
    codecContext->pix_fmt = AV_PIX_FMT_RGB8;      /* palettized, 1 byte/pixel */
    codecContext->time_base = (AVRational){1, 25};

    if (avcodec_open2(codecContext, codec, NULL) < 0) {
        printf("Could not open codec.\n");
        return 1;
    }

    /* ---- GIF muxer ------------------------------------------------------ */
    AVFormatContext *outputFormatContext = NULL;
    if (avformat_alloc_output_context2(&outputFormatContext, NULL, NULL, output_filename) < 0) {
        printf("Could not allocate output format codecContext.\n");
        return 1;
    }

    AVStream *stream = avformat_new_stream(outputFormatContext, NULL);
    if (!stream) {
        printf("Could not create stream.\n");
        return 1;
    }
    avcodec_parameters_from_context(stream->codecpar, codecContext);

#if GET_DATA_TO_BUFF
    /* Route ALL muxer output (header, packets, trailer) into memory. */
    if (avio_open_dyn_buf(&outputFormatContext->pb) < 0) {
        printf("Failed to open output buffer.\n");
        return -1;
    }
#else
    if (avio_open(&outputFormatContext->pb, output_filename, AVIO_FLAG_WRITE) < 0) {
        printf("Could not open output file.\n");
        return 1;
    }
#endif

    /* Write the container header through whichever pb was installed above. */
    if (avformat_write_header(outputFormatContext, NULL) < 0) {
        printf("avformat_write_header failed\n");
    }

    /* ---- source frame --------------------------------------------------- */
    AVFrame *frame = av_frame_alloc();
    frame->format = AV_PIX_FMT_RGB8;
    frame->width = width;
    frame->height = height;
    if (av_frame_get_buffer(frame, 0) < 0) {
        printf("Failed to allocate frame buffer.\n");
        return -1;
    }

    /* RGB8 is one byte per pixel; the original *3 allocation is kept as
     * headroom because readFileToBuffer() reads the whole input file without
     * checking the destination size. */
    uint8_t *rgb8data = (uint8_t *)av_malloc(width * height * 3);
    long read_size = 0;
    int ret = readFileToBuffer(rgb8data, input_filename, &read_size);
    if (ret != 0 || read_size == 0) {
        printf("error argbData %p read_size %ld\n", rgb8data, read_size);
    }
    /* Copy the packed plane into the frame (bytewidth == width for RGB8). */
    av_image_copy_plane(frame->data[0], frame->linesize[0], rgb8data, width, width, height);

    /* ---- encode + mux --------------------------------------------------- */
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;

    avcodec_send_frame(codecContext, frame);
    avcodec_send_frame(codecContext, NULL);  /* flush: drain delayed packets */

    while (avcodec_receive_packet(codecContext, &pkt) == 0) {
        pkt.stream_index = stream->index;
        av_write_frame(outputFormatContext, &pkt);
        av_packet_unref(&pkt);
    }
    av_write_trailer(outputFormatContext);

#if GET_DATA_TO_BUFF
    /* The dyn-buf now holds the complete .gif byte stream. */
    uint8_t *outputBuffer = NULL;
    int outputBufferSize = avio_close_dyn_buf(outputFormatContext->pb, &outputBuffer);
    outputFormatContext->pb = NULL;
    writeBufferToFile("bf_1.gif", outputBuffer, outputBufferSize);
    av_free(outputBuffer);   /* avio_close_dyn_buf hands ownership to us */
#else
    avio_closep(&outputFormatContext->pb);   /* close the file-backed AVIO */
#endif

    /* ---- cleanup -------------------------------------------------------- */
    avformat_free_context(outputFormatContext);
    av_frame_free(&frame);
    avcodec_free_context(&codecContext);
    av_free(rgb8data);

    return 0;
}



MAKEFILE


# Compiler and baseline flags (all warnings + debug symbols).
CC = gcc
CFLAGS = -Wall -g
# Headers and libraries from the locally built FFmpeg tree in ../output.
CFLAGS += -I ../output/include
CFLAGS += -L ../output/lib
# Link the static FFmpeg archives (-l:lib*.a) plus their system dependencies
# (pthread, zlib, bzip2, libm, libdl).
LIBS = -l:libavdevice.a -l:libavformat.a -l:libavcodec.a -lavfilter -lswresample -lswscale -l:libavutil.a -lpthread -lz -lbz2 -lm -ldl

all: encode_to_gif

# NOTE(review): make requires the recipe line below to start with a hard TAB;
# the web copy appears to have converted it to a space -- restore the TAB
# before using this Makefile.
encode_to_gif: encode_to_gif.c
 $(CC) $(CFLAGS) -o $@ $< $(LIBS)



What I tried: the content inside the macro definition is the relevant solution I found online. Unfortunately, the data I got is in the original RGB8 format, not in GIF format. (Compare: ffmpeg writing data to a file vs. the data dumped from the buffer.)
The data dumped from the buffer seems to have no gif header


My expected result I want to output the encoded gif directly to the buffer,but i don't know what to do.




-
AVI encoded to streaming mp4 or webm not playing in HTML5 player
30 mai 2013, par Vprnl — [EDIT]
I'm trying to get ffmpeg to encode various AVI files to mp4 for streaming purposes
I use this nodejs to start FFMPEG.
When I try this (webm) (some settings are wrapped by the node module but produce the default FFMPEG command) with this command :
.withVideoCodec('libvpx')
.addOptions(['-bf 8','-bt 240k','-preset fast','-strict -2','-b:v 320K','-bufsize 62000', '-maxrate 620k','-movflags +empty_moov','-y'])
.withAudioBitrate('192k')
.withAudioCodec('libvorbis')
.toFormat('webm')The video get's streamed properly to the client but the duration isn't passed on. So the video has a duration of 'infinite'.
So I tried to encode with H264. Which also works (I see the duration being set in the client) but no picture sadly.
For H264 I use :
.addOptions(['-y','-vcodec libx264','-bf 8','-bt 240k','-preset fast','-strict -2','-b:v 320K','-bufsize 62000', '-maxrate 620k','-acodec aac','-ab 128k','-movflags +empty_moov'])
.toFormat('mp4')I get this log :
I hope someone can point me in the right direction. Thanks !
The client just gives an undefined error.
I hope someone can point me in the right direction. Thanks!
-
How would I send x264 encoded frames correctly over a network using UDP ?
5 avril 2020, par Eoin McBennett — I'm trying to send the encoded h264 frames I have over a network as I get them; currently I'm only streaming the nal units I get from each frame as it is encoded. Is this the correct approach ?



I wrote a receiver application on a different computer to get the nals and wrote them all to a file sequentially, when played with vlc I didn't get any video and instead just got a screeching noise. I'm not sure exactly where the problem would lie here. I have included the result of the FFmpeg -I command on the file created.



Encoder and sender code




 //Udp initialisation
 // NOTE(review): this is a fragment pasted from inside a larger function;
 // the surrounding declarations (camera, encoderQueue, PORT, fillImage) are
 // defined elsewhere and are not visible in this copy.
 struct sockaddr_in broadcastAddr;
 int sock;
 int yes = 1;
 int addr_len;
 int count;
 fd_set readfd;
 char buffer[1024];
 int i;

 sock = socket(AF_INET, SOCK_DGRAM,0);

 if(sock < 0){
 std::cout << "Failed to initialise socket!" << std::endl;
 }

 // Enable SO_BROADCAST so datagrams can be sent to the broadcast address.
 int ret = setsockopt(sock, SOL_SOCKET, SO_BROADCAST, (char*)&yes, sizeof(yes));
 if(ret < 0){
 // NOTE(review): the next line was garbled when this code was scraped from
 // the web -- the error print, its closing braces, and (judging from the
 // trailing words "the size of the address") an assignment such as
 // "addr_len = sizeof(broadcastAddr);" were fused into one line. As written,
 // addr_len is used uninitialized below; confirm against the original post.
 std::cout << "setsockopt error!" </ the size of the address

 memset((void*)&broadcastAddr,0,addr_len); //0 out the address bits

 broadcastAddr.sin_family = AF_INET;
 broadcastAddr.sin_addr.s_addr = INADDR_BROADCAST;
 // NOTE(review): sin_port is assigned without htons(); port numbers in
 // sockaddr_in must be in network byte order.
 broadcastAddr.sin_port = PORT;



 //Set the encoder parameters
 x264_param_t param;
 x264_param_default_preset(&param,"veryfast","zerolatency");
 param.i_threads = 1;
 param.i_width = camera.getWidth();
 param.i_height = camera.getHeight();
 param.i_fps_num = 30;
 param.i_fps_den = 1;
// Intra refres:
 param.i_keyint_max = 30;
 param.b_intra_refresh = 1;
//Rate control:
 param.rc.i_rc_method = X264_RC_CRF;
 param.rc.f_rf_constant = 25;
 param.rc.f_rf_constant_max = 35;
//For streaming: repeat SPS/PPS headers and use Annex-B start codes.
 param.b_repeat_headers = 1;
 param.b_annexb = 1;
 x264_param_apply_profile(&param, "baseline");

 x264_t *encoder = x264_encoder_open(&param); //H.264 encoder object
 x264_picture_t pic_in, pic_out;
 x264_picture_alloc(&pic_in, X264_CSP_I420,camera.getWidth(), camera.getHeight());

 //Network abstraction layer units for broadcast
 x264_nal_t *nals;
 int i_nals;

 while(true){

 //If there is valid data in the processing queue
 if(!encoderQueue.empty()){

 //File the x264 input data structure with the file data
 fillImage(encoderQueue.front(),camera.getWidth(),camera.getHeight(),&pic_in);

 //Encode and send
 int frame_size = x264_encoder_encode(encoder, &nals, &i_nals, &pic_in, &pic_out);
 if (frame_size >= 0) {
 //The frame is ready to be sent over UDP!
 for(int i = 0; i < i_nals; i++){
 // NOTE(review): this looks like the bug behind the unplayable dump: it
 // sends &nals[0].p_payload (the ADDRESS of the payload pointer field),
 // not the payload bytes, and sends frame_size bytes for every nal
 // instead of nals[i].i_payload. The intended call is presumably
 // sendto(sock, nals[i].p_payload, nals[i].i_payload, ...).
 ret = sendto(sock, &nals[0].p_payload, frame_size,0,(struct sockaddr*)&broadcastAddr,addr_len);
 if(ret > 0){
 std::cout << "Streamed frame nal unit " << i << std::endl;
 } else{
 std::cout << "Failed to stream nal unit " << i << std::endl;
 }
 }
 }
 else{
 std::cout<<"Failed to encode h264 frame!" << std::endl;
 }
 //Finsihed with the current frame, pop it off the queue and remove any nals to do with it
 encoderQueue.pop();
 frame_size = 0;
 nals = nullptr;
 i_nals = 0;
 }


 }




Receiver application



#include <iostream>
#include 
#include <network></network>Network.h>
#include <netinet></netinet>in.h>
#include <arpa></arpa>inet.h>
#include <sys></sys>types.h>
#include <sys></sys>socket.h>
#include <queue>


#define BUFFER_LEN 10000
#define PORT_NO 3879



/*
 * UDP receiver: binds to PORT_NO on all interfaces, receives broadcast
 * datagrams containing h264 nal units, and appends them to stream.h264.
 *
 * BUG FIX: the original computed the address length as sizeof(&bcAddr) --
 * the size of a POINTER, not of the sockaddr_in structure -- so recvfrom()
 * was handed a bogus address-length value.
 * Also fixed: unchecked fopen() (NULL would crash fwrite), sin_port stored
 * without htons(), and a negative recv_len being passed to fwrite().
 */
int main(int argc, const char * argv[]) {

    FILE *file; // File the received h264 nals are appended to

    // sockAddr: the local address we bind to.
    // bcAddr:   filled in by recvfrom() with each sender's address.
    struct sockaddr_in sockAddr, bcAddr;
    socklen_t bcAddr_len = sizeof(bcAddr);   // FIX: was sizeof(&bcAddr)

    memset(&sockAddr, 0, sizeof(sockAddr));
    memset(&bcAddr, 0, sizeof(bcAddr));

    // Listen for IPv4/UDP datagrams on PORT_NO from any interface.
    sockAddr.sin_family = AF_INET;
    sockAddr.sin_port = htons(PORT_NO);
    sockAddr.sin_addr.s_addr = htonl(INADDR_ANY);

    bcAddr.sin_family = AF_INET;
    bcAddr.sin_port = htons(PORT_NO);        // FIX: network byte order
    inet_aton("255.255.255.255", &bcAddr.sin_addr);

    // Initialise a udp socket to read broadcast bytes.
    int soc = socket(AF_INET, SOCK_DGRAM, 0);
    if (soc < 0) {
        std::cout << "Failed to initialise UDP socket!" << std::endl;
        return -1;
    }

    // Bind the address details to the socket, check for errors.
    if (bind(soc, (struct sockaddr*)&sockAddr, sizeof(sockAddr)) < 0) {
        std::cout << "Failed to bind address structure to socket!" << std::endl;
        return -2;
    }

    file = fopen("stream.h264", "wb");
    if (!file) {                             // FIX: fopen was unchecked
        std::cout << "Failed to open stream.h264 for writing!" << std::endl;
        return -3;
    }

    unsigned char buffer[BUFFER_LEN];

    // NOTE(review): without the error-break below this loop never terminates,
    // so the FILE* is only flushed when the process dies; consider fflush()
    // per datagram if the file must be playable while capturing.
    while (true) {
        memset(buffer, 0, sizeof(buffer));

        int recv_len = recvfrom(soc, buffer, BUFFER_LEN, 0,
                                (struct sockaddr *)&bcAddr, &bcAddr_len);
        if (recv_len < 0) {                  // FIX: don't fwrite a negative length
            std::cout << "recvfrom failed" << std::endl;
            break;
        }

        std::cout << "Received " << recv_len << "bytes on broadcast address" << std::endl;
        fwrite(buffer, sizeof(unsigned char), recv_len, file);
    }

    fclose(file);
    return 0;
}
</queue></iostream>



FFMPEG -I output






Any help would be greatly appreciated.