
Media (2)
-
Granite de l’Aber Ildut
9 September 2011
Updated: September 2011
Language: French
Type: Text
-
Géodiversité
9 September 2011
Updated: August 2018
Language: French
Type: Text
Other articles (105)
-
Encoding and processing into web-friendly formats
13 April 2011
MediaSPIP automatically converts uploaded files to internet-compatible formats.
Video files are encoded in MP4, Ogv and WebM (supported by HTML5), with MP4 also covering Flash playback.
Audio files are encoded in MP3 and Ogg (supported by HTML5), with MP3 also covering Flash playback.
Where possible, text is analyzed in order to retrieve the data needed for indexing by search engines, and then exported as a series of image files.
All uploaded files are stored online in their original format, so you can (...)
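Purely as a hedged illustration, not taken from the MediaSPIP documentation: conversions of this kind are typically driven by ffmpeg, and the commands below sketch rough equivalents, assuming a build with libx264, libtheora, libvorbis, libvpx and libmp3lame enabled (the presets MediaSPIP actually applies may differ):

ffmpeg -i input.mov -c:v libx264 -c:a aac output.mp4                  # H.264 MP4 (HTML5 and Flash)
ffmpeg -i input.mov -c:v libtheora -q:v 6 -c:a libvorbis output.ogv   # Theora/Vorbis Ogv (HTML5)
ffmpeg -i input.mov -c:v libvpx -b:v 1M -c:a libvorbis output.webm    # VP8/Vorbis WebM (HTML5)
ffmpeg -i input.wav -c:a libmp3lame -q:a 2 output.mp3                 # MP3 (HTML5 and Flash)
-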
Adding notes and captions to images
7 February 2011
To add notes and captions to images, the first step is to install the "Légendes" plugin.
Once the plugin is activated, you can configure it in the configuration area to change the rights for creating, modifying and deleting notes. By default, only site administrators can add notes to images.
Changes when adding a media file
When adding a media file of type "image", a new button appears above the preview (...)
-
Writing a news item
21 June 2013
Present changes to your MediaSPIP, or news about your projects, using the news section.
In spipeo, MediaSPIP's default theme, news items are displayed at the bottom of the main page, below the editorials.
You can customize the news item creation form.
News item creation form: for a document of type "news item", the default fields are: publication date (customize the publication date) (...)
On other sites (9631)
-
FFmpeg: open a DVD VOB chain?
28 October 2020, by Sugrue
I'm new to FFmpeg, so this may be a dumb question, but I don't see the answer in the documentation.

I want to decode frames from DVD VOB files. Opening the first VOB in the group works fine, but how do I tell ffmpeg to continue on to the next VOB and read all the VOBs on a DVD?

I have the VOB files in a folder on a hard disk.
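Not part of the original question, but a common answer is ffmpeg's concat protocol: the VOBs of one title set are consecutive MPEG-PS segments, so listing them in order lets ffmpeg read them as a single input. A sketch, with hypothetical file names from a typical title set:

ffmpeg -i "concat:VTS_01_1.VOB|VTS_01_2.VOB|VTS_01_3.VOB" -c copy joined.mpg

When decoding programmatically, the same "concat:..." string can be passed to avformat_open_input, since concat is an input protocol rather than a command-line-only feature.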


-
Cutting a live stream into separate mp4 files
9 June 2017, by Fearhunter
I am researching how to cut a live stream into pieces and save them as mp4 files. I am using this source for the proof of concept:
And this is the example code I use:
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.MediaServices.Client;
using Newtonsoft.Json.Linq;
namespace AMSLiveTest
{
class Program
{
private const string StreamingEndpointName = "streamingendpoint001";
private const string ChannelName = "channel001";
private const string AssetlName = "asset001";
private const string ProgramlName = "program001";
// Read values from the App.config file.
private static readonly string _mediaServicesAccountName =
ConfigurationManager.AppSettings["MediaServicesAccountName"];
private static readonly string _mediaServicesAccountKey =
ConfigurationManager.AppSettings["MediaServicesAccountKey"];
// Field for service context.
private static CloudMediaContext _context = null;
private static MediaServicesCredentials _cachedCredentials = null;
static void Main(string[] args)
{
// Create and cache the Media Services credentials in a static class variable.
_cachedCredentials = new MediaServicesCredentials(
_mediaServicesAccountName,
_mediaServicesAccountKey);
// Use the cached credentials to create the CloudMediaContext.
_context = new CloudMediaContext(_cachedCredentials);
IChannel channel = CreateAndStartChannel();
// Set the Live Encoder to point to the channel's input endpoint:
string ingestUrl = channel.Input.Endpoints.FirstOrDefault().Url.ToString();
// Use the previewEndpoint to preview and verify
// that the input from the encoder is actually reaching the Channel.
string previewEndpoint = channel.Preview.Endpoints.FirstOrDefault().Url.ToString();
IProgram program = CreateAndStartProgram(channel);
ILocator locator = CreateLocatorForAsset(program.Asset, program.ArchiveWindowLength);
IStreamingEndpoint streamingEndpoint = CreateAndStartStreamingEndpoint();
GetLocatorsInAllStreamingEndpoints(program.Asset);
// Once you are done streaming, clean up your resources.
Cleanup(streamingEndpoint, channel);
}
public static IChannel CreateAndStartChannel()
{
//If you want to change the Smooth fragments to HLS segment ratio, you would set the ChannelCreationOptions’s Output property.
IChannel channel = _context.Channels.Create(
new ChannelCreationOptions
{
Name = ChannelName,
Input = CreateChannelInput(),
Preview = CreateChannelPreview()
});
//Starting and stopping Channels can take some time to execute. To determine the state of operations after calling Start or Stop, query IChannel.State.
channel.Start();
return channel;
}
private static ChannelInput CreateChannelInput()
{
return new ChannelInput
{
StreamingProtocol = StreamingProtocol.RTMP,
AccessControl = new ChannelAccessControl
{
IPAllowList = new List<IPRange>
{
new IPRange
{
Name = "TestChannelInput001",
// Setting 0.0.0.0 for Address and 0 for SubnetPrefixLength
// will allow access to all IP addresses.
Address = IPAddress.Parse("0.0.0.0"),
SubnetPrefixLength = 0
}
}
}
};
}
private static ChannelPreview CreateChannelPreview()
{
return new ChannelPreview
{
AccessControl = new ChannelAccessControl
{
IPAllowList = new List<IPRange>
{
new IPRange
{
Name = "TestChannelPreview001",
// Setting 0.0.0.0 for Address and 0 for SubnetPrefixLength
// will allow access to all IP addresses.
Address = IPAddress.Parse("0.0.0.0"),
SubnetPrefixLength = 0
}
}
}
};
}
public static void UpdateCrossSiteAccessPoliciesForChannel(IChannel channel)
{
var clientPolicy =
@"<?xml version=""1.0"" encoding=""utf-8""?>
<access-policy>
<cross-domain-access>
<policy>
<allow-from http-request-headers=""*"">
<domain uri=""*""/>
</allow-from>
<grant-to>
<resource path=""/"" include-subpaths=""true""/>
</grant-to>
</policy>
</cross-domain-access>
</access-policy>";
var xdomainPolicy =
@"<?xml version=""1.0"" ?>
<cross-domain-policy>
<allow-access-from domain=""*"" />
</cross-domain-policy>";
channel.CrossSiteAccessPolicies.ClientAccessPolicy = clientPolicy;
channel.CrossSiteAccessPolicies.CrossDomainPolicy = xdomainPolicy;
channel.Update();
}
public static IProgram CreateAndStartProgram(IChannel channel)
{
IAsset asset = _context.Assets.Create(AssetlName, AssetCreationOptions.None);
// Create a Program on the Channel. You can have multiple Programs that overlap or are sequential;
// however each Program must have a unique name within your Media Services account.
IProgram program = channel.Programs.Create(ProgramlName, TimeSpan.FromHours(3), asset.Id);
program.Start();
return program;
}
public static ILocator CreateLocatorForAsset(IAsset asset, TimeSpan ArchiveWindowLength)
{
// You cannot create a streaming locator using an AccessPolicy that includes write or delete permissions.
var locator = _context.Locators.CreateLocator
(
LocatorType.OnDemandOrigin,
asset,
_context.AccessPolicies.Create
(
"Live Stream Policy",
ArchiveWindowLength,
AccessPermissions.Read
)
);
return locator;
}
public static IStreamingEndpoint CreateAndStartStreamingEndpoint()
{
var options = new StreamingEndpointCreationOptions
{
Name = StreamingEndpointName,
ScaleUnits = 1,
AccessControl = GetAccessControl(),
CacheControl = GetCacheControl()
};
IStreamingEndpoint streamingEndpoint = _context.StreamingEndpoints.Create(options);
streamingEndpoint.Start();
return streamingEndpoint;
}
private static StreamingEndpointAccessControl GetAccessControl()
{
return new StreamingEndpointAccessControl
{
IPAllowList = new List<IPRange>
{
new IPRange
{
Name = "Allow all",
Address = IPAddress.Parse("0.0.0.0"),
SubnetPrefixLength = 0
}
},
AkamaiSignatureHeaderAuthenticationKeyList = new List<AkamaiSignatureHeaderAuthenticationKey>
{
new AkamaiSignatureHeaderAuthenticationKey
{
Identifier = "My key",
Expiration = DateTime.UtcNow + TimeSpan.FromDays(365),
Base64Key = Convert.ToBase64String(GenerateRandomBytes(16))
}
}
};
}
private static byte[] GenerateRandomBytes(int length)
{
var bytes = new byte[length];
using (var rng = new RNGCryptoServiceProvider())
{
rng.GetBytes(bytes);
}
return bytes;
}
private static StreamingEndpointCacheControl GetCacheControl()
{
return new StreamingEndpointCacheControl
{
MaxAge = TimeSpan.FromSeconds(1000)
};
}
public static void UpdateCrossSiteAccessPoliciesForStreamingEndpoint(IStreamingEndpoint streamingEndpoint)
{
var clientPolicy =
@"<?xml version=""1.0"" encoding=""utf-8""?>
<access-policy>
<cross-domain-access>
<policy>
<allow-from http-request-headers=""*"">
<domain uri=""*""/>
</allow-from>
<grant-to>
<resource path=""/"" include-subpaths=""true""/>
</grant-to>
</policy>
</cross-domain-access>
</access-policy>";
var xdomainPolicy =
@"<?xml version=""1.0"" ?>
<cross-domain-policy>
<allow-access-from domain=""*"" />
</cross-domain-policy>";
streamingEndpoint.CrossSiteAccessPolicies.ClientAccessPolicy = clientPolicy;
streamingEndpoint.CrossSiteAccessPolicies.CrossDomainPolicy = xdomainPolicy;
streamingEndpoint.Update();
}
public static void GetLocatorsInAllStreamingEndpoints(IAsset asset)
{
var locators = asset.Locators.Where(l => l.Type == LocatorType.OnDemandOrigin);
var ismFile = asset.AssetFiles.AsEnumerable().FirstOrDefault(a => a.Name.EndsWith(".ism"));
var template = new UriTemplate("{contentAccessComponent}/{ismFileName}/manifest");
var urls = locators.SelectMany(l =>
_context
.StreamingEndpoints
.AsEnumerable()
.Where(se => se.State == StreamingEndpointState.Running)
.Select(
se =>
template.BindByPosition(new Uri("http://" + se.HostName),
l.ContentAccessComponent,
ismFile.Name)))
.ToArray();
}
public static void Cleanup(IStreamingEndpoint streamingEndpoint,
IChannel channel)
{
if (streamingEndpoint != null)
{
streamingEndpoint.Stop();
streamingEndpoint.Delete();
}
IAsset asset;
if (channel != null)
{
foreach (var program in channel.Programs)
{
asset = _context.Assets.Where(se => se.Id == program.AssetId)
.FirstOrDefault();
program.Stop();
program.Delete();
if (asset != null)
{
foreach (var l in asset.Locators)
l.Delete();
asset.Delete();
}
}
channel.Stop();
channel.Delete();
}
}
}
}
Now I want to make a method that cuts the live stream into pieces, for example every 15 minutes, and saves each piece as an mp4 file, but I don't know where to start.
Can someone point me in the right direction?
Kind regards
UPDATE:
I want to save the mp4 files on my hard disk.
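Not from the original post, but one route that sidesteps the Media Services SDK entirely is ffmpeg's segment muxer, pointed at a playback URL the streaming endpoint exposes (for example an HLS manifest). A minimal sketch with a hypothetical URL, cutting 15-minute (900-second) pieces to disk without re-encoding:

ffmpeg -i "https://example.streaming.mediaservices.windows.net/locator/asset.ism/manifest(format=m3u8-aapl)" -c copy -f segment -segment_time 900 -reset_timestamps 1 piece%03d.mp4

-reset_timestamps 1 makes each piece start at timestamp zero, so the resulting files play as standalone clips.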
-
RTP packets detected as UDP
8 July 2024, by fritz
Here is what I am trying to do:



WebRTC endpoint > RTP Endpoint > ffmpeg > RTMP server.




This is what my SDP file looks like.



var cm_offer = "v=0\n" +
 "o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
 "s=nginx\n" +
 "c=IN IP4 127.0.0.1\n" +
 "t=0 0\n" +
 "m=audio 60820 RTP/AVP 0\n" +
 "a=rtpmap:0 PCMU/8000\n" +
 "a=recvonly\n" +
 "m=video 59618 RTP/AVP 101\n" +
 "a=rtpmap:101 H264/90000\n" +
 "a=recvonly\n";




What's happening is that Wireshark detects the incoming packets on port 59618, but as plain UDP packets rather than RTP. I am trying to capture the packets with ffmpeg, using the following command:



ubuntu@ip-132-31-40-100:~$ ffmpeg -i udp://127.0.0.1:59618 -vcodec copy stream.mp4
ffmpeg version git-2017-01-22-f1214ad Copyright (c) 2000-2017 the FFmpeg developers
 built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.3)
 configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libfreetype --enable-gnutls --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvidstab --enable-libwavpack --enable-nvenc
 libavutil 55. 44.100 / 55. 44.100
 libavcodec 57. 75.100 / 57. 75.100
 libavformat 57. 63.100 / 57. 63.100
 libavdevice 57. 2.100 / 57. 2.100
 libavfilter 6. 69.100 / 6. 69.100
 libavresample 3. 2. 0 / 3. 2. 0
 libswscale 4. 3.101 / 4. 3.101
 libswresample 2. 4.100 / 2. 4.100
 libpostproc 54. 2.100 / 54. 2.100 




All I get is a blinking cursor, and the stream.mp4 file is not written to disk after I exit (Ctrl+C).



So can you help me figure out:

- why Wireshark cannot detect the packets as RTP (I suspect it has something to do with the SDP; see the sketch after this list), and
- how to handle the SDP answer when the RTP endpoint is pushing to ffmpeg, which doesn't send an answer back?
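Not part of the original question, but for reference: ffmpeg cannot recognise a bare udp:// input as RTP on its own, because the payload-type mapping lives in the SDP rather than in the packets, and Wireshark uses similar heuristics (hence plain UDP unless you force "Decode As... > RTP" on the port). The usual workaround is to save the media description as a local .sdp file and open that instead, whitelisting the protocols involved. A minimal sketch, assuming the offer above is saved as stream.sdp:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=kurento-rtp
c=IN IP4 127.0.0.1
t=0 0
m=audio 60820 RTP/AVP 0
a=rtpmap:0 PCMU/8000
m=video 59618 RTP/AVP 101
a=rtpmap:101 H264/90000

ffmpeg -protocol_whitelist file,udp,rtp -i stream.sdp -c:v copy -c:a aac stream.mp4

H264 can be stream-copied into mp4, while PCMU cannot, hence the AAC transcode. Because ffmpeg consumes the SDP directly as a file, no SDP answer has to travel back to the RTP endpoint.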







Here is the entire code (the Kurento hello-world tutorial, modified):



/*
 * (C) Copyright 2014-2015 Kurento (http://kurento.org/)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

 function getopts(args, opts)
 {
 var result = opts.default || {};
 args.replace(
 new RegExp("([^?=&]+)(=([^&]*))?", "g"),
 function($0, $1, $2, $3) { result[$1] = decodeURI($3); });

 return result;
 };

 var args = getopts(location.search,
 {
 default:
 {
 ws_uri: 'wss://' + location.hostname + ':8433/kurento',
 ice_servers: undefined
 }
 });

 function setIceCandidateCallbacks(webRtcPeer, webRtcEp, onerror)
 {
 webRtcPeer.on('icecandidate', function(candidate) {
 console.log("Local candidate:",candidate);

 candidate = kurentoClient.getComplexType('IceCandidate')(candidate);

 webRtcEp.addIceCandidate(candidate, onerror)
 });

 webRtcEp.on('OnIceCandidate', function(event) {
 var candidate = event.candidate;

 console.log("Remote candidate:",candidate);

 webRtcPeer.addIceCandidate(candidate, onerror);
 });
 }


 function setIceCandidateCallbacks2(webRtcPeer, rtpEp, onerror)
 {
 webRtcPeer.on('icecandidate', function(candidate) {
 console.log("Local candidate:", candidate);

 candidate = kurentoClient.getComplexType('IceCandidate')(candidate);

 rtpEp.addIceCandidate(candidate, onerror)
 });
 }


 window.addEventListener('load', function()
 {
 console = new Console();

 var webRtcPeer;
 var pipeline;
 var webRtcEpt;

 var videoInput = document.getElementById('videoInput');
 var videoOutput = document.getElementById('videoOutput');

 var startButton = document.getElementById("start");
 var stopButton = document.getElementById("stop");

 startButton.addEventListener("click", function()
 {
 showSpinner(videoInput, videoOutput);

 var options = {
 localVideo: videoInput,
 remoteVideo: videoOutput
 };


 if (args.ice_servers) {
 console.log("Use ICE servers: " + args.ice_servers);
 options.configuration = {
 iceServers : JSON.parse(args.ice_servers)
 };
 } else {
 console.log("Use freeice")
 }

 webRtcPeer = kurentoUtils.WebRtcPeer.WebRtcPeerSendrecv(options, function(error)
 {
 if(error) return onError(error)

 this.generateOffer(onOffer)
 });

 function onOffer(error, sdpOffer)
 {
 if(error) return onError(error)

 kurentoClient(args.ws_uri, function(error, client)
 {
 if(error) return onError(error);

 client.create("MediaPipeline", function(error, _pipeline)
 {
 if(error) return onError(error);

 pipeline = _pipeline;

 pipeline.create("WebRtcEndpoint", function(error, webRtc){
 if(error) return onError(error);

 webRtcEpt = webRtc;

 setIceCandidateCallbacks(webRtcPeer, webRtc, onError)

 webRtc.processOffer(sdpOffer, function(error, sdpAnswer){
 if(error) return onError(error);

 webRtcPeer.processAnswer(sdpAnswer, onError);
 });
 webRtc.gatherCandidates(onError);

 webRtc.connect(webRtc, function(error){
 if(error) return onError(error);

 console.log("Loopback established");
 });
 });



 pipeline.create("RtpEndpoint", function(error, rtp){
 if(error) return onError(error);

 //setIceCandidateCallbacks2(webRtcPeer, rtp, onError)


 var cm_offer = "v=0\n" +
 "o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
 "s=nginx\n" +
 "c=IN IP4 127.0.0.1\n" +
 "t=0 0\n" +
 "m=audio 60820 RTP/AVP 0\n" +
 "a=rtpmap:0 PCMU/8000\n" +
 "a=recvonly\n" +
 "m=video 59618 RTP/AVP 101\n" +
 "a=rtpmap:101 H264/90000\n" +
 "a=recvonly\n";



 rtp.processOffer(cm_offer, function(error, cm_sdpAnswer){
 if(error) return onError(error);

 //webRtcPeer.processAnswer(cm_sdpAnswer, onError);
 });
 //rtp.gatherCandidates(onError);

 webRtcEpt.connect(rtp, function(error){
 if(error) return onError(error);

 console.log("RTP endpoint connected to webRTC");
 });
 });









 });
 });
 }
 });
 stopButton.addEventListener("click", stop);


 function stop() {
 if (webRtcPeer) {
 webRtcPeer.dispose();
 webRtcPeer = null;
 }

 if(pipeline){
 pipeline.release();
 pipeline = null;
 }

 hideSpinner(videoInput, videoOutput);
 }

 function onError(error) {
 if(error)
 {
 console.error(error);
 stop();
 }
 }
 })


 function showSpinner() {
 for (var i = 0; i < arguments.length; i++) {
 arguments[i].poster = 'img/transparent-1px.png';
 arguments[i].style.background = "center transparent url('img/spinner.gif') no-repeat";
 }
 }

 function hideSpinner() {
 for (var i = 0; i < arguments.length; i++) {
 arguments[i].src = '';
 arguments[i].poster = 'img/webrtc.png';
 arguments[i].style.background = '';
 }
 }

 /**
 * Lightbox utility (to display media pipeline image in a modal dialog)
 */
 $(document).delegate('*[data-toggle="lightbox"]', 'click', function(event) {
 event.preventDefault();
 $(this).ekkoLightbox();
 });