
Media (2)
-
Granite of the Aber Ildut
9 September 2011
Updated: September 2011
Language: French
Type: Text
-
Geodiversity
9 September 2011
Updated: August 2018
Language: French
Type: Text
Other articles (54)
-
Personalize by adding your logo, banner or background image
5 September 2013
Some themes support three customization elements: adding a logo; adding a banner; adding a background image.
-
Writing a news item
21 June 2013
Present the changes to your MédiaSPIP, or news about your projects, on your MédiaSPIP using the news section.
In MédiaSPIP's default theme, spipeo, news items are displayed at the bottom of the main page, below the editorials.
You can customize the form used to create a news item.
News item creation form: for a document of type "news item", the default fields are: Publication date (customize the publication date) (...)
-
Publishing on MédiaSpip
13 June 2013
Can I post content from an iPad tablet?
Yes, if your installed MédiaSpip is at version 0.2 or higher. If needed, contact the administrator of your MédiaSpip to find out.
On other sites (11562)
-
select a screen section ffmpeg c++ on macos
21 April 2022, by C1ngh10
I am trying to record the screen on macOS with ffmpeg. I would like to be able to select a section of the screen instead of the whole desktop. I tried to set several options such as vf, with values like "crop=150:150:0:0", or video_size with the value "150x150". The result was that the output video had the correct dimensions (150x150), but the whole screen was recorded instead of the specified section. Does anybody know another way to do that?

int ScreenRecorder::openVideoDevice() {
    value = 0;
    videoOptions = nullptr;
    pAVFormatContext = nullptr;

    pAVFormatContext = avformat_alloc_context();

    string dimension = to_string(width) + "x" + to_string(height);
    av_dict_set(&videoOptions, "video_size", dimension.c_str(), 0); //option to set the dimension of the screen section to record
    value = av_dict_set(&videoOptions, "framerate", "25", 0);
    if (value < 0) {
        cerr << "Error in setting dictionary value (setting framerate)" << endl;
        exit(-1);
    }

    value = av_dict_set(&videoOptions, "preset", "ultrafast", 0);
    if (value < 0) {
        cerr << "Error in setting dictionary value (setting preset value)" << endl;
        exit(-1);
    }

    //The distance from the left edge of the screen or desktop
    value = av_dict_set(&videoOptions, "vf", ("crop=" + to_string(width) + ":" + to_string(height) + ":" + to_string(x_offset) + ":" +
                                              to_string(y_offset)).c_str(), 0);
    if (value < 0) {
        cerr << "Error in setting crop" << endl;
        exit(-1);
    }

    value = av_dict_set(&videoOptions, "pixel_format", "yuv420p", 0);
    if (value < 0) {
        cerr << "Error in setting pixel format" << endl;
        exit(-1);
    }

    pAVInputFormat = av_find_input_format("avfoundation");

    if (avformat_open_input(&pAVFormatContext, "1:none", pAVInputFormat, &videoOptions) != 0) {
        cerr << "Error in opening input device" << endl;
        exit(-1);
    }

    //get video stream infos from context
    value = avformat_find_stream_info(pAVFormatContext, nullptr);
    if (value < 0) {
        cerr << "Error in retrieving the stream info" << endl;
        exit(-1);
    }

    VideoStreamIndx = -1;
    for (int i = 0; i < pAVFormatContext->nb_streams; i++) {
        if (pAVFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            VideoStreamIndx = i;
            break;
        }
    }
    if (VideoStreamIndx == -1) {
        cerr << "Error: unable to find video stream index" << endl;
        exit(-2);
    }

    pAVCodecContext = pAVFormatContext->streams[VideoStreamIndx]->codec;
    pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id/*params->codec_id*/);
    if (pAVCodec == nullptr) {
        cerr << "Error: unable to find decoder video" << endl;
        exit(-1);
    }

    return 0;
}
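
A note on the code above, not from the original post: "vf" and "preset" are ffmpeg command-line/encoder options rather than options of the avfoundation input device, so passing them to avformat_open_input() does not crop anything; avfoundation always captures the full display, and the crop has to be applied to the decoded frames afterwards. Below is a minimal sketch of doing that with libavfilter's crop filter, under the assumption that setupCropFilter(), filterGraph, bufferSrcCtx and bufferSinkCtx are new, hypothetical members added to the same class; error handling is omitted.

extern "C" {
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>
}

// Hypothetical helper (not in the original class): builds the graph
// "buffer -> crop=w:h:x:y -> buffersink" for the decoded screen frames.
void ScreenRecorder::setupCropFilter() {
    filterGraph = avfilter_graph_alloc();

    // Describe the frames produced by the decoder (25 fps, as set on the device above).
    char srcArgs[256];
    snprintf(srcArgs, sizeof(srcArgs),
             "video_size=%dx%d:pix_fmt=%d:time_base=1/25:pixel_aspect=1/1",
             pAVCodecContext->width, pAVCodecContext->height, pAVCodecContext->pix_fmt);
    avfilter_graph_create_filter(&bufferSrcCtx, avfilter_get_by_name("buffer"),
                                 "in", srcArgs, nullptr, filterGraph);
    avfilter_graph_create_filter(&bufferSinkCtx, avfilter_get_by_name("buffersink"),
                                 "out", nullptr, nullptr, filterGraph);

    // The crop filter keeps only the requested screen section.
    string cropDesc = "crop=" + to_string(width) + ":" + to_string(height) + ":" +
                      to_string(x_offset) + ":" + to_string(y_offset);

    AVFilterInOut* outputs = avfilter_inout_alloc();  // feeds the buffer source
    AVFilterInOut* inputs  = avfilter_inout_alloc();  // drains into the buffer sink
    outputs->name = av_strdup("in");
    outputs->filter_ctx = bufferSrcCtx;
    outputs->pad_idx = 0;
    outputs->next = nullptr;
    inputs->name = av_strdup("out");
    inputs->filter_ctx = bufferSinkCtx;
    inputs->pad_idx = 0;
    inputs->next = nullptr;

    avfilter_graph_parse_ptr(filterGraph, cropDesc.c_str(), &inputs, &outputs, nullptr);
    avfilter_graph_config(filterGraph, nullptr);
    avfilter_inout_free(&inputs);
    avfilter_inout_free(&outputs);

    // For each decoded frame:
    //   av_buffersrc_add_frame(bufferSrcCtx, decodedFrame);
    //   av_buffersink_get_frame(bufferSinkCtx, croppedFrame);  // croppedFrame is width x height
}

From the command line the equivalent is to place -vf "crop=w:h:x:y" after the avfoundation input, since filters always run on decoded frames rather than inside the capture device.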



-
Live555 Multicast RTSP Server - Display in FFMPEG
31 May 2021, by BanB
I made a multicast RTSP server using the Live555 library.

It is displayed by VLC, but not by VMS (NX Witness) or FFMPEG.

If I use unicast I can display it, but I do not know how to change the file that is being streamed in unicast.

Is there a way to change the streamed file in unicast, or to make the multicast stream viewable in FFMPEG?

This is my code.




Header




#include <iostream>
#include <string>
#include <filesystem>
#include <fstream>
#include <ctime>
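// Live555 headers follow; their names were lost when the post was rendered
// (presumably liveMedia.hh, BasicUsageEnvironment.hh and GroupsockHelper.hh).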
#include 
#include 
#include 
#include <opencv2/videoio.hpp>
#include <opencv2/imgcodecs.hpp>

void Start(const char* streamPath, int port);
void AddDataL(BYTE* data, int length);
void AddDataHW(BYTE* data, int height, int width);

void Play();
void AfterPlaying(void*);

void EncodeToH264(cv::Mat image);
long long GetTimeMs();
int GetFileCount();
const char* GetFirstFilePath();

const int fourcc = cv::VideoWriter::fourcc('x', '2', '6', '4');

UsageEnvironment* env;
H264ServerMediaSubsession* h264;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

const char* dataPath = "data/";
const char* extension = ".h264";




CPP




void Start()
{
    TaskScheduler *scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    RTSPServer *rtspServer = RTSPServer::createNew(*env, 8554);

    if (rtspServer == NULL)
        throw;

    struct sockaddr_storage destinationAddress;
    destinationAddress.ss_family = AF_INET;
    ((struct sockaddr_in &)destinationAddress).sin_addr.s_addr = chooseRandomIPv4SSMAddress(*env);

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum + 1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly();
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly();

    OutPacketBuffer::maxSize = 300000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    const unsigned estimatedSessionBandwidth = 5000;
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen + 1];
    gethostname((char *)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0';

    RTCPInstance *rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock, estimatedSessionBandwidth, CNAME, videoSink, NULL, True);

    ServerMediaSession *sms = ServerMediaSession::createNew(*env, "live", "live", "Session streamed by \"Test Server\"", True);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    *env << "Start > " << rtspServer->rtspURL(sms) << "\n";

    Play();

    env->taskScheduler().doEventLoop();
}

void AddData(BYTE* data, int length)
{
    EncodeToH264(cv::imdecode(cv::Mat(1, length, CV_8UC3, data), 1));
}

void EncodeToH264(cv::Mat image)
{
    std::string fileName = dataPath + std::to_string(GetTimeMs()) + extension;
    cv::VideoWriter writer(fileName.c_str(), fourcc, 1, image.size());
    writer.write(image);
    writer.write(image);
    writer.release();
}

void Play()
{
    while (GetFileCount() == 0);

    const char* fileName = GetFirstFilePath();

    ByteStreamFileSource* fileSource;
    while ((fileSource = ByteStreamFileSource::createNew(*env, fileName)) == NULL);

    FramedSource* videoES = fileSource;

    videoSource = H264VideoStreamFramer::createNew(*env, videoES);

    videoSink->startPlaying(*videoSource, AfterPlaying, videoSink);
}

void AfterPlaying(void*)
{
    videoSink->stopPlaying();
    Medium::close(videoSource);

    if (GetFileCount() > 1)
        std::filesystem::remove(GetFirstFilePath());

    Play();
}

long long GetTimeMs()
{
    return std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
}

int GetFileCount()
{
    return std::distance(std::filesystem::directory_iterator(dataPath), std::filesystem::directory_iterator());
}

const char* GetFirstFilePath()
{
    for (const auto& entry : std::filesystem::directory_iterator(dataPath))
        return entry.path().string().c_str();
}
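
One hedged observation on the FFMPEG side, an assumption based on how Live555 builds the session description rather than something verified against this exact setup: since the sink is created before any H.264 data has been parsed, the SDP returned for DESCRIBE may contain no sprop-parameter-sets (SPS/PPS). VLC can usually recover the parameter sets from the stream itself, while FFmpeg-based clients often need them up front. Live555's H264VideoRTPSink has createNew() overloads that accept the SPS and PPS NAL units so that they end up in the SDP, roughly:

    // Sketch only: hand the parameter sets to the sink so DESCRIBE returns a usable SDP.
    // sps, spsSize, pps and ppsSize are hypothetical buffers, e.g. parsed out of the
    // first .h264 file written by EncodeToH264(); check H264VideoRTPSink.hh for the
    // exact overload signatures.
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96,
                                            sps, spsSize, pps, ppsSize);

It is also worth confirming that the FFmpeg client really joins the multicast group; its RTSP demuxer can be forced to do so with the rtsp_transport option set to udp_multicast.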



-
ffmpeg motions not working properly while using multiple urls
22 April 2021, by al pacino
I am trying to apply ffmpeg transitions, but sometimes the videos get concatenated without any transition. In the command below, the first input is a file present in a directory and the second input is an online video URL; the output video was concatenated without the distance transition.

One example of a distance transition command is below:


ffmpeg -i video1.webm -i 'url2' -filter_complex '[0]scale=1280:720,settb=AVTB[v0];[1]scale=1280:720,settb=AVTB[v1];[v0][v1]xfade=transition=distance:duration=1:offset=9,format=yuv420p' output_video.webm



And the log is below:


ffmpeg version n4.3.1 Copyright (c) 2000-2020 the FFmpeg developers
 built with gcc 7 (Ubuntu 7.5.0-3ubuntu1~18.04)
 configuration: --prefix= --prefix=/usr --disable-debug --disable-doc --disable-static --enable-cuda --enable-cuda-sdk --enable-cuvid --enable-libdrm --enable-ffplay --enable-gnutls --enable-gpl --enable-libass --enable-libfdk-aac --enable-libfontconfig --enable-libfreetype --enable-libmp3lame --enable-libnpp --enable-libopencore_amrnb --enable-libopencore_amrwb --enable-libopus --enable-libpulse --enable-sdl2 --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libv4l2 --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libx265 --enable-libxcb --enable-libxvid --enable-nonfree --enable-nvenc --enable-omx --enable-openal --enable-opencl --enable-runtime-cpudetect --enable-shared --enable-vaapi --enable-vdpau --enable-version3 --enable-xlib
 libavutil 56. 51.100 / 56. 51.100
 libavcodec 58. 91.100 / 58. 91.100
 libavformat 58. 45.100 / 58. 45.100
 libavdevice 58. 10.100 / 58. 10.100
 libavfilter 7. 85.100 / 7. 85.100
 libswscale 5. 7.100 / 5. 7.100
 libswresample 3. 7.100 / 3. 7.100
 libpostproc 55. 7.100 / 55. 7.100
Input #0, matroska,webm, from '/home/abc/xyz/887c28b7-8819-42d3-bb4f-95c44a72bce9.webm':
 Metadata:
 MINOR_VERSION : 0
 COMPATIBLE_BRANDS: mp42mp41isomavc1
 MAJOR_BRAND : mp42
 ENCODER : Lavf58.45.100
 Duration: 00:00:21.03, start: -0.006000, bitrate: 346 kb/s
 Stream #0:0: Video: vp9 (Profile 0), yuv420p(tv), 480x360, SAR 1:1 DAR 4:3, 30 fps, 30 tbr, 1k tbn, 1k tbc (default)
 Metadata:
 ENCODER : Lavc58.91.100 libvpx-vp9
 DURATION : 00:00:21.033000000
 Stream #0:1: Audio: opus, 48000 Hz, stereo, fltp (default)
 Metadata:
 HANDLER_NAME : L-SMASH Audio Handler
 ENCODER : Lavc58.91.100 libopus
 DURATION : 00:00:10.013000000
Input #1, mov,mp4,m4a,3gp,3g2,mj2, from 'https://player.vimeo.com/external/521230771.sd.mp4?s=9ee7ac91c97713d955848e34deac37d6d03d76a8&profile_id=164':
 Metadata:
 major_brand : mp42
 minor_version : 0
 compatible_brands: mp42mp41isomavc1
 creation_time : 2021-03-09T03:42:34.000000Z
 Duration: 00:00:10.01, start: 0.000000, bitrate: 619 kb/s
 Stream #1:0(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p(tv, smpte170m), 640x360, 488 kb/s, 30 fps, 30 tbr, 30 tbn, 60 tbc (default)
 Metadata:
 creation_time : 2021-03-09T03:42:34.000000Z
 handler_name : L-SMASH Video Handler
 encoder : AVC Coding
 Stream #1:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 125 kb/s (default)
 Metadata:
 creation_time : 2021-03-09T03:42:34.000000Z
 handler_name : L-SMASH Audio Handler
Stream mapping:
 Stream #0:0 (vp9) -> scale (graph 0)
 Stream #1:0 (h264) -> scale (graph 0)
 format (graph 0) -> Stream #0:0 (libvpx-vp9)
 Stream #0:1 -> #0:1 (opus (native) -> opus (libopus))
Press [q] to stop, [?] for help
[libopus @ 0x5597e87c5bc0] No bit rate set. Defaulting to 96000 bps.
[libvpx-vp9 @ 0x5597e87c46c0] v1.7.0
[libvpx-vp9 @ 0x5597e87c46c0] Neither bitrate nor constrained quality specified, using default CRF of 32
Output #0, webm, to '/home/abc/afcb26e1-9c7a-4065-be6e-d1a2940ee0d9.webm':
 Metadata:
 MINOR_VERSION : 0
 COMPATIBLE_BRANDS: mp42mp41isomavc1
 MAJOR_BRAND : mp42
 encoder : Lavf58.45.100
 Stream #0:0: Video: vp9 (libvpx-vp9), yuv420p, 480x360 [SAR 1:1 DAR 4:3], q=-1--1, 30 fps, 1k tbn, 30 tbc (default)
 Metadata:
 encoder : Lavc58.91.100 libvpx-vp9
 Side data:
 cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: N/A
 Stream #0:1: Audio: opus (libopus), 48000 Hz, stereo, flt, 96 kb/s (default)
 Metadata:
 HANDLER_NAME : L-SMASH Audio Handler
 DURATION : 00:00:10.013000000
 encoder : Lavc58.91.100 libopus
frame= 858 fps= 44 q=0.0 Lsize= 1141kB time=00:00:29.00 bitrate= 322.2kbits/s speed=1.48x 
video:1130kB audio:1kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.861019%