
Other articles (103)
- Encoding and processing into web-friendly formats
13 April 2011. MediaSPIP automatically converts uploaded files to internet-compatible formats.
Video files are encoded as MP4, OGV and WebM for HTML5 playback, and as MP4 for Flash playback.
Audio files are encoded as MP3 and Ogg for HTML5 playback, and as MP3 for Flash playback.
Where possible, text is analyzed to extract the data needed for search-engine indexing, and is then exported as a series of image files.
All uploaded files are stored online in their original format, so you can (...)
- Accepted formats
28 January 2010. The following commands provide information about the formats and codecs handled by the local ffmpeg installation (see the sketch after this excerpt):
ffmpeg -codecs
ffmpeg -formats
Accepted input video formats
This list is not exhaustive; it highlights the main formats in use:
h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10
m4v: raw MPEG-4 video format
flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263
Theora
wmv:
Possible output video formats
To begin with, we (...)
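
As a quick illustration of the two commands above, here is a minimal Go sketch that shells out to the local ffmpeg installation and checks whether a given codec is reported. hasCodec is a hypothetical helper, ffmpeg is assumed to be on the PATH, and the substring test is only a rough check, not a parse of the codec table:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// hasCodec runs "ffmpeg -codecs" and reports whether the named codec
// appears anywhere in the output of the local ffmpeg installation.
// This is a rough substring check; parsing the table columns would be
// more robust.
func hasCodec(name string) (bool, error) {
	out, err := exec.Command("ffmpeg", "-hide_banner", "-codecs").Output()
	if err != nil {
		return false, err
	}
	return strings.Contains(string(out), " "+name+" "), nil
}

func main() {
	for _, c := range []string{"h264", "theora", "wmv2"} {
		ok, err := hasCodec(c)
		if err != nil {
			fmt.Println("could not run ffmpeg:", err)
			return
		}
		fmt.Printf("%s supported: %v\n", c, ok)
	}
}

The same approach works for "ffmpeg -formats" by swapping the flag and the names being probed.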
- Farm management
2 March 2010. The farm as a whole is managed by "super admins".
Certain settings can be adjusted to regulate the needs of the different channels.
Initially it uses the "Gestion de mutualisation" plugin.
On other sites (8219)
- Revision 70ad668056: vpx_dsp/prob.h: vp9_ -> vpx_ change prefix vp9_ to vpx_ for non codec specific
20 July 2015, by Yaowu Xu
Changed paths:
Modify /vp9/common/vp9_entropy.c
Modify /vp9/common/vp9_entropy.h
Modify /vp9/common/vp9_entropymode.c
Modify /vp9/common/vp9_entropymode.h
Modify /vp9/common/vp9_entropymv.c
Modify /vp9/common/vp9_entropymv.h
Modify /vp9/common/vp9_seg_common.c
Modify /vp9/common/vp9_seg_common.h
Modify /vp9/encoder/vp9_bitstream.c
Modify /vp9/encoder/vp9_cost.c
Modify /vp9/encoder/vp9_cost.h
Modify /vp9/encoder/vp9_encodemv.c
Modify /vp9/encoder/vp9_tokenize.c
Modify /vp9/encoder/vp9_tokenize.h
Modify /vp9/encoder/vp9_treewriter.c
Modify /vp9/encoder/vp9_treewriter.h
Modify /vpx_dsp/bitreader.h
Modify /vpx_dsp/bitwriter.h
Modify /vpx_dsp/prob.c
Modify /vpx_dsp/prob.h
vpx_dsp/prob.h: vp9_ -> vpx_
Change prefix vp9_ to vpx_ for non codec specific functions and data structures.
Change-Id: I97c7e6422eceea99212b93f4942bc2187763a07c
- lavc: Consistently prefix input buffer defines
29 June 2015, by Vittorio Giovara
Signed-off-by: Vittorio Giovara <vittorio.giovara@gmail.com>
- [DH] avconv.c
- [DH] doc/examples/avcodec.c
- [DH] doc/examples/qsvdec.c
- [DH] libavcodec/4xm.c
- [DH] libavcodec/a64multienc.c
- [DH] libavcodec/aac_adtstoasc_bsf.c
- [DH] libavcodec/aac_parser.c
- [DH] libavcodec/aacdec.c
- [DH] libavcodec/aacenc.c
- [DH] libavcodec/ac3_parser.c
- [DH] libavcodec/ac3dec.h
- [DH] libavcodec/adpcmenc.c
- [DH] libavcodec/alacenc.c
- [DH] libavcodec/asvenc.c
- [DH] libavcodec/atrac3.c
- [DH] libavcodec/avcodec.h
- [DH] libavcodec/avpacket.c
- [DH] libavcodec/binkaudio.c
- [DH] libavcodec/cdxl.c
- [DH] libavcodec/cook.c
- [DH] libavcodec/dca_parser.c
- [DH] libavcodec/dss_sp.c
- [DH] libavcodec/dump_extradata_bsf.c
- [DH] libavcodec/dvdec.c
- [DH] libavcodec/ffv1enc.c
- [DH] libavcodec/g2meet.c
- [DH] libavcodec/get_bits.h
- [DH] libavcodec/gif.c
- [DH] libavcodec/h264.c
- [DH] libavcodec/h264_mp4toannexb_bsf.c
- [DH] libavcodec/hevc_mp4toannexb_bsf.c
- [DH] libavcodec/hevc_parse.c
- [DH] libavcodec/huffyuvdec.c
- [DH] libavcodec/huffyuvenc.c
- [DH] libavcodec/iff.c
- [DH] libavcodec/imc.c
- [DH] libavcodec/imx_dump_header_bsf.c
- [DH] libavcodec/internal.h
- [DH] libavcodec/jpeglsenc.c
- [DH] libavcodec/libdcadec.c
- [DH] libavcodec/libfaac.c
- [DH] libavcodec/libfdk-aacenc.c
- [DH] libavcodec/libopenh264enc.c
- [DH] libavcodec/libopusenc.c
- [DH] libavcodec/libspeexenc.c
- [DH] libavcodec/libvo-aacenc.c
- [DH] libavcodec/libvorbis.c
- [DH] libavcodec/libx265.c
- [DH] libavcodec/libxavs.c
- [DH] libavcodec/libxvid.c
- [DH] libavcodec/ljpegenc.c
- [DH] libavcodec/mdec.c
- [DH] libavcodec/mjpega_dump_header_bsf.c
- [DH] libavcodec/mjpegdec.c
- [DH] libavcodec/motionpixels.c
- [DH] libavcodec/movsub_bsf.c
- [DH] libavcodec/mpeg4videodec.c
- [DH] libavcodec/mpegvideo.c
- [DH] libavcodec/mss2.c
- [DH] libavcodec/noise_bsf.c
- [DH] libavcodec/nuv.c
- [DH] libavcodec/nvenc.c
- [DH] libavcodec/options.c
- [DH] libavcodec/parser.c
- [DH] libavcodec/pngenc.c
- [DH] libavcodec/pnm_parser.c
- [DH] libavcodec/proresenc.c
- [DH] libavcodec/qsvdec.c
- [DH] libavcodec/qsvenc.c
- [DH] libavcodec/qsvenc_hevc.c
- [DH] libavcodec/shorten.c
- [DH] libavcodec/startcode.c
- [DH] libavcodec/svq1enc.c
- [DH] libavcodec/tiff.c
- [DH] libavcodec/tiffenc.c
- [DH] libavcodec/truemotion2.c
- [DH] libavcodec/utils.c
- [DH] libavcodec/utvideodec.c
- [DH] libavcodec/utvideoenc.c
- [DH] libavcodec/vc1dec.c
- [DH] libavcodec/wavpack.c
- [DH] libavcodec/wma.h
- [DH] libavcodec/wmadec.c
- [DH] libavcodec/wmalosslessdec.c
- [DH] libavcodec/wmaprodec.c
- [DH] libavcodec/wmavoice.c
- [DH] libavcodec/wnv1.c
- [DH] libavdevice/vfwcap.c
- [DH] libavdevice/xcbgrab.c
- [DH] libavformat/4xm.c
- [DH] libavformat/adxdec.c
- [DH] libavformat/aiffdec.c
- [DH] libavformat/anm.c
- [DH] libavformat/apc.c
- [DH] libavformat/apetag.c
- [DH] libavformat/asfdec.c
- [DH] libavformat/avidec.c
- [DH] libavformat/avio.h
- [DH] libavformat/aviobuf.c
- [DH] libavformat/bink.c
- [DH] libavformat/cafdec.c
- [DH] libavformat/flacdec.c
- [DH] libavformat/flvdec.c
- [DH] libavformat/hevc.c
- [DH] libavformat/id3v2.c
- [DH] libavformat/isom.c
- [DH] libavformat/jvdec.c
- [DH] libavformat/matroskadec.c
- [DH] libavformat/mov.c
- [DH] libavformat/movenc.c
- [DH] libavformat/mpc.c
- [DH] libavformat/mpc8.c
- [DH] libavformat/mpegts.c
- [DH] libavformat/mvi.c
- [DH] libavformat/mxfdec.c
- [DH] libavformat/mxg.c
- [DH] libavformat/nutdec.c
- [DH] libavformat/oggdec.c
- [DH] libavformat/oggparsecelt.c
- [DH] libavformat/oggparseflac.c
- [DH] libavformat/oggparseopus.c
- [DH] libavformat/oggparsespeex.c
- [DH] libavformat/oggparsetheora.c
- [DH] libavformat/oggparsevorbis.c
- [DH] libavformat/omadec.c
- [DH] libavformat/rawdec.c
- [DH] libavformat/rdt.c
- [DH] libavformat/riffdec.c
- [DH] libavformat/rl2.c
- [DH] libavformat/rmdec.c
- [DH] libavformat/rtpdec_h264.c
- [DH] libavformat/rtpdec_hevc.c
- [DH] libavformat/rtpdec_latm.c
- [DH] libavformat/rtpdec_mpeg4.c
- [DH] libavformat/rtpdec_qdm2.c
- [
- What's the most desirable way to capture system display and audio in the form of individual encoded audio and video packets in go (language)? [closed]
11 January 2023, by Tiger Yang
Question (read the context below first):


For those of you familiar with the capabilities of Go: is there a better way to go about all this? Since ffmpeg is so ubiquitous, I'm sure it's been optimized to perfection, but what's the best way to capture the system display and audio as individual encoded audio and video packets in Go, so that they can then be sent via webtransport-go? I want it to prioritize efficiency and low latency, and ideally to capture and encode the framebuffer directly, the way ffmpeg does.


Thanks! I have many other questions about this, but I think it's best to ask as I go.


Context and what I've done so far:


I'm writing remote desktop software for my personal use because of grievances with current solutions out there. At the moment, it consists of a web app that uses the WebTransport API to send input datagrams and receive AV packets on two dedicated unidirectional streams, and the WebCodecs API to decode those packets. On the server side, I originally planned to use Python with the aioquic library as a WebTransport server. Upon connection and authentication, the server would start ffmpeg as a subprocess with this command:


ffmpeg -init_hw_device d3d11va -filter_complex ddagrab=video_size=1920x1080:framerate=60 -vcodec hevc_nvenc -tune ll -preset p7 -spatial_aq 1 -temporal_aq 1 -forced-idr 1 -rc cbr -b:v 400K -no-scenecut 1 -g 216000 -f hevc -


What I really appreciate about this is that it uses Windows' Desktop Duplication API to copy the GPU framebuffer and hand it directly to the on-die hardware encoder, with zero round trips to the CPU. I think it's about as efficient and elegant a solution as I can manage. It then writes the encoded stream to stdout, which Python can read and send to the client.


As for the audio, there is another ffmpeg instance:


ffmpeg -f dshow -channels 2 -sample_rate 48000 -sample_size 16 -audio_buffer_size 15 -i audio="RD Audio (High Definition Audio Device)" -acodec libopus -vbr on -application audio -mapping_family 0 -apply_phase_inv true -b:a 25K -fec false -packet_loss 0 -map 0 -f data -


This listens to a physical loopback interface, which is literally just a short wire bridging the front-panel headphone and microphone jacks (I'm aware of the quality loss from converting to analog and back, but the audio is then crushed down to 25 kbps, so it's fine).


Unfortunately, aioquic was not easy to work with in my opinion, and I found webtransport-go (https://github.com/adriancable/webtransport-go), which was a hell of a lot better in both simplicity and documentation. However, now I'm dealing with a whole new language, and I want to ask the question above.


EDIT: Here's the code for my server so far:




package main

import (
 "bytes"
 "context"
 "fmt"
 "log"
 "net/http"
 "os/exec"
 "time"

 "github.com/adriancable/webtransport-go"
)

func warn(str string) {
 fmt.Printf("\n===== WARNING ===================================================================================================\n %s\n=================================================================================================================\n", str)
}

func main() {

 password := []byte("abc")

 videoString := []string{
 "ffmpeg",
 "-init_hw_device", "d3d11va",
 "-filter_complex", "ddagrab=video_size=1920x1080:framerate=60",
 "-vcodec", "hevc_nvenc",
 "-tune", "ll",
 "-preset", "p7",
 "-spatial_aq", "1",
 "-temporal_aq", "1",
 "-forced-idr", "1",
 "-rc", "cbr",
 "-b:v", "500K",
 "-no-scenecut", "1",
 "-g", "216000",
 "-f", "hevc", "-",
 }

 audioString := []string{
 "ffmpeg",
 "-f", "dshow",
 "-channels", "2",
 "-sample_rate", "48000",
 "-sample_size", "16",
 "-audio_buffer_size", "15",
 "-i", "audio=RD Audio (High Definition Audio Device)",
 "-acodec", "libopus",
 "-mapping_family", "0",
 "-b:a", "25K",
 "-map", "0",
 "-f", "data", "-",
 }

 connected := false

 http.HandleFunc("/", func(writer http.ResponseWriter, request *http.Request) {
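 // adriancable/webtransport-go hands the WebTransport session to the handler via the request body.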
 session := request.Body.(*webtransport.Session)

 session.AcceptSession()
 fmt.Println("\nAccepted incoming WebTransport connection.")
 fmt.Println("Awaiting authentication...")

 authData, err := session.ReceiveMessage(session.Context()) // Waits here till first datagram
 if err != nil { // if client closes connection before sending anything
 fmt.Println("\nConnection closed:", err)
 return
 }

 if len(authData) >= 2 && bytes.Equal(authData[2:], password) {
 if connected {
 session.CloseSession()
 warn("Client has authenticated, but a session is already taking place! Connection closed.")
 return
 } else {
 connected = true
 fmt.Println("Client has authenticated!\n")
 }
 } else {
 session.CloseSession()
 // Guard the slice: authData may be shorter than 2 bytes here.
 received := ""
 if len(authData) >= 2 {
 received = string(authData[2:])
 }
 warn("Client has failed authentication! Connection closed. (" + received + ")")
 return
 }

 videoStream, _ := session.OpenUniStreamSync(session.Context())

 videoCmd := exec.Command(videoString[0], videoString[1:]...)
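 // Forward ffmpeg's stdout (the raw HEVC bitstream) to the video stream as it is produced.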
 go func() {
 videoOut, _ := videoCmd.StdoutPipe()
 if err := videoCmd.Start(); err != nil {
 warn("Failed to start the ffmpeg video process: " + err.Error())
 return
 }

 buffer := make([]byte, 15000)
 for {
 // Read the next chunk of the encoded stream.
 n, err := videoOut.Read(buffer)
 if err != nil {
 break
 }
 if n > 0 {
 videoStream.Write(buffer[:n])
 }
 }
 }()

 time.Sleep(50 * time.Millisecond)

 audioStream, err := session.OpenUniStreamSync(session.Context())

 audioCmd := exec.Command(audioString[0], audioString[1:]...)
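 // Forward ffmpeg's stdout (the raw Opus stream) to the audio stream as it is produced.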
 go func() {
 audioOut, _ := audioCmd.StdoutPipe()
 if err := audioCmd.Start(); err != nil {
 warn("Failed to start the ffmpeg audio process: " + err.Error())
 return
 }

 buffer := make([]byte, 15000)
 for {
 // Read the next chunk of the encoded stream.
 n, err := audioOut.Read(buffer)
 if err != nil {
 break
 }
 if n > 0 {
 audioStream.Write(buffer[:n])
 }
 }
 }()

 // Main loop: receive input datagrams until the client disconnects.
 for {
 data, err := session.ReceiveMessage(session.Context())
 if err != nil {
 videoCmd.Process.Kill()
 audioCmd.Process.Kill()

 connected = false

 fmt.Println("\nConnection closed:", err)
 break
 }

 if len(data) > 0 && data[0] == byte(0) {
 fmt.Printf("Received mouse datagram: %s\n", data)
 }
 }

 })

 server := &webtransport.Server{
 ListenAddr: ":1024",
 TLSCert: webtransport.CertFile{Path: "SSL/fullchain.pem"},
 TLSKey: webtransport.CertFile{Path: "SSL/privkey.pem"},
 QuicConfig: &webtransport.QuicConfig{
 KeepAlive: false,
 MaxIdleTimeout: 3 * time.Second,
 },
 }

 fmt.Println("Launching WebTransport server at", server.ListenAddr)
 ctx, cancel := context.WithCancel(context.Background())
 defer cancel()
 if err := server.Run(ctx); err != nil {
 log.Fatal(err)
 }

}
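
Since the question asks for individual encoded packets rather than arbitrary byte chunks, one possible refinement is to split ffmpeg's raw HEVC output on Annex-B start codes before writing each NAL unit to the video stream. This is only a sketch, not part of the code above: splitAnnexB and nextStartCode are hypothetical helpers, and it assumes the -f hevc output is an Annex-B elementary stream.

package main

import "fmt"

// nextStartCode returns the index and length of the next Annex-B start code
// (00 00 01 or 00 00 00 01) in buf at or after position "from", or (-1, 0)
// if none is found.
func nextStartCode(buf []byte, from int) (pos, size int) {
	for i := from; i+3 <= len(buf); i++ {
		if buf[i] == 0 && buf[i+1] == 0 {
			if buf[i+2] == 1 {
				return i, 3
			}
			if i+4 <= len(buf) && buf[i+2] == 0 && buf[i+3] == 1 {
				return i, 4
			}
		}
	}
	return -1, 0
}

// splitAnnexB splits buf into complete NAL units (start codes stripped) and
// returns the trailing, possibly incomplete unit as rest, so it can be
// prepended to the next read from ffmpeg's stdout.
func splitAnnexB(buf []byte) (units [][]byte, rest []byte) {
	pos, size := nextStartCode(buf, 0)
	if pos < 0 {
		return nil, buf
	}
	for {
		next, nsize := nextStartCode(buf, pos+size)
		if next < 0 {
			return units, buf[pos:]
		}
		units = append(units, buf[pos+size:next])
		pos, size = next, nsize
	}
}

func main() {
	// One complete NAL unit behind a 4-byte start code, followed by the start
	// of a second unit that is still incomplete until more data arrives.
	stream := []byte{0, 0, 0, 1, 0x40, 0x01, 0xAA, 0, 0, 1, 0x42, 0x01, 0xBB, 0, 0}
	units, rest := splitAnnexB(stream)
	fmt.Println("complete units:", units)
	fmt.Println("leftover bytes:", rest)
}

In the video goroutine, the rest bytes would be carried over and prepended to the next Read so that NAL units are never cut across writes. The audio side is harder: as far as I know, the raw Opus stream written with -f data carries no packet boundaries at all, so getting true per-packet audio would need a self-delimiting container (such as Ogg) or some other framing.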