
Media (1)
-
SWFUpload Process
6 September 2011
Updated: September 2011
Language: French
Type: Text
Other articles (67)
-
Publishing on MediaSPIP
13 June 2013
Can I post content from an iPad tablet?
Yes, if your MediaSPIP installation is version 0.2 or later. If needed, contact the administrator of your MediaSPIP to find out.
-
MediaSPIP 0.1 Beta version
25 April 2011
MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed with other manual (...)
-
Submit bugs and patches
13 April 2011
Unfortunately, software is never perfect.
If you think you have found a bug, report it using our ticket system. Please help us fix it by providing the following information: the browser you are using, including the exact version; as precise an explanation of the problem as possible; if possible, the steps that lead to the problem; and a link to the site/page in question.
If you think you have solved the bug, fill in a ticket and attach a corrective patch to it.
You may also (...)
On other sites (10887)
-
How to Save and Display a Video Simultaneously using the C# AForge.NET framework?
8 April 2014, by Akshay
I am able to display the video from my webcam, or any other integrated device, in a PictureBox. I am also able to save the video to an AVI file using the FFMPEG DLL files.
I want to do both things simultaneously, i.e. save the video to the AVI file and display the live feed at the same time.
This is for a surveillance project where I want to monitor the live feed and save it too.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using AForge.Video;
using AForge.Video.DirectShow;
using AForge.Video.FFMPEG;
using AForge.Video.VFW;
using System.Drawing.Imaging;
using System.IO;
namespace cam_aforge1
{
public partial class Form1 : Form
{
private bool DeviceExist = false;
private FilterInfoCollection videoDevices;
private VideoCaptureDevice videoSource = null;
public Form1()
{
InitializeComponent();
}
private void getCamList()
{
try
{
videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
comboBox1.Items.Clear();
if (videoDevices.Count == 0)
throw new ApplicationException();
DeviceExist = true;
foreach (FilterInfo device in videoDevices)
{
comboBox1.Items.Add(device.Name);
}
comboBox1.SelectedIndex = 0; // default to the first camera
}
catch (ApplicationException)
{
DeviceExist = false;
comboBox1.Items.Add("No capture device on your system");
}
}
private void rfsh_Click(object sender, EventArgs e)
{
getCamList();
}
private void start_Click(object sender, EventArgs e)
{
if (start.Text == "&Start")
{
if (DeviceExist)
{
CloseVideoSource(); // stop any previously running source before starting a new one
videoSource = new VideoCaptureDevice(videoDevices[comboBox1.SelectedIndex].MonikerString);
videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
videoSource.NewFrame += new NewFrameEventHandler(video_NewFrameSave);
videoSource.DesiredFrameSize = new Size(160, 120);
//videoSource.DesiredFrameRate = 10;
videoSource.Start();
label2.Text = "Device running...";
start.Text = "&Stop";
timer1.Enabled = true;
}
else
{
label2.Text = "Error: No Device selected.";
}
}
else
{
if (videoSource.IsRunning)
{
timer1.Enabled = false;
CloseVideoSource();
label2.Text = "Device stopped.";
start.Text = "&Start";
}
}
}
// Display handler: AForge raises NewFrame on the capture thread, so this
// is a cross-thread PictureBox update.
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
Bitmap img = (Bitmap)eventArgs.Frame.Clone();
pictureBox1.Image = img;
}
// Latest frame snapshot, updated by the capture thread and consumed by the
// writer loop in button1_Click below.
Bitmap imgsave;
private void video_NewFrameSave(object sender, NewFrameEventArgs eventArgs)
{
imgsave = (Bitmap)eventArgs.Frame.Clone();
}
private void CloseVideoSource()
{
if (videoSource != null)
if (videoSource.IsRunning)
{
// SignalToStop is asynchronous; WaitForStop() would block until the
// capture thread has actually finished.
videoSource.SignalToStop();
videoSource = null;
}
}
private void timer1_Tick(object sender, EventArgs e)
{
// FramesReceived resets each time it is read, so with a one-second timer
// interval this approximates the current frame rate.
label2.Text = "Device running... " + videoSource.FramesReceived.ToString() + " FPS";
}
private void Form1_FormClosed(object sender, FormClosedEventArgs e)
{
CloseVideoSource();
}
private void Form1_Load(object sender, EventArgs e)
{
}
VideoFileWriter writer;
private void button1_Click(object sender, EventArgs e)
{
// Note: the writer is opened at 640x480 and 75 fps, while the capture
// device above is configured for 160x120 frames; WriteVideoFrame expects
// bitmaps matching the size the writer was opened with.
int width = 640;
int height = 480;
writer = new VideoFileWriter();
writer.Open("test.avi", width, height, 75, VideoCodec.MPEG4);
// This loop blocks the UI thread and re-writes whatever snapshot is
// current on each iteration, as fast as the encoder allows.
for (int i = 0; i < 5000; i++)
{
writer.WriteVideoFrame(imgsave);
}
}
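// A possible alternative (a sketch, not from the original post): write each
// incoming frame from its own NewFrame handler instead of looping over the
// latest snapshot, so recording advances at the capture rate while the
// display handler keeps running. The "recording" flag, "writerLock", and
// this handler are hypothetical additions: subscribe it next to
// video_NewFrame above, set recording = true after writer.Open(...), and
// open the writer with the frame size the device actually delivers.
private bool recording = false;
private readonly object writerLock = new object();
private void video_NewFrameRecord(object sender, NewFrameEventArgs eventArgs)
{
    lock (writerLock)
    {
        if (!recording || writer == null)
            return;
        using (Bitmap frame = (Bitmap)eventArgs.Frame.Clone())
        {
            writer.WriteVideoFrame(frame);
        }
    }
}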
private void button2_Click(object sender, EventArgs e)
{
writer.Close();
}
}
}
Thanks in advance.
-
This code works when I run it in the Windows console, but when I emulate the Windows console in Node.js it does not work and returns an unclear error
22 December 2016, by Maxim Cherevatov
I have this code:

cmd.get(
    'trimp3 ant.mp3 ant2.mp3 00:00 00:20',
    function(data){
        console.log('the node-cmd cloned dir contains these files:\n\n', data);
    }
);

If I run this in the Windows console directly, it works well!
But when I emulate the Windows console in Node.js, this code does not work and returns an unclear error (a mis-encoded Russian cmd.exe message in the original output):

[!!] ERROR: "ffmpeg" is not recognized as an internal or external command, operable program or batch file.

To emulate the console I use node-cmd.
-
Overlay filter in LibAV/FFmpeg returns strange (tripled) frame in C
28 July 2014, by gkuczera
I tried to make a program which merges two frames. I use LibAV (libav-win32-20140428) under Windows 7 64-bit and Visual Studio 2013. But the result is quite odd.
The filter used is overlay. When I change the graph to one that uses only a single stream and add a fade effect, everything works like a charm. But overlay and e.g. drawbox give me strange distortion (three frames on one, and a black-and-white effect). Here is the code:
static int init_filter_graph(AVFilterGraph **pGraph, AVFilterContext **pSrc1, AVFilterContext **pSink)
{
AVFilterGraph* tFilterGraph;
AVFilterContext* tBufferContext1;
AVFilter* tBuffer1;
AVFilterContext* tColorContext;
AVFilter* tColor;
AVFilterContext* tOverlayContext;
AVFilter* tOverlay;
AVFilterContext* tBufferSinkContext;
AVFilter* tBufferSink;
AVDictionary* tOptionsDict = NULL; /* this declaration was missing from the original snippet */
int tError;
/* Create a new filtergraph, which will contain all the filters. */
tFilterGraph = avfilter_graph_alloc();
if (!tFilterGraph) {
return -1;
}
{ // BUFFER FILTER 1
tBuffer1 = avfilter_get_by_name("buffer");
if (!tBuffer1) {
return -1;
}
tBufferContext1 = avfilter_graph_alloc_filter(tFilterGraph, tBuffer1, "src1");
if (!tBufferContext1) {
return -1;
}
av_dict_set(&tOptionsDict, "width", "320", 0);
av_dict_set(&tOptionsDict, "height", "240", 0);
av_dict_set(&tOptionsDict, "pix_fmt", "bgr24", 0);
av_dict_set(&tOptionsDict, "time_base", "1/25", 0);
av_dict_set(&tOptionsDict, "sar", "1", 0);
tError = avfilter_init_dict(tBufferContext1, &tOptionsDict);
av_dict_free(&tOptionsDict);
if (tError < 0) {
return tError;
}
}
{ // COLOR FILTER
tColor = avfilter_get_by_name("color");
if (!tColor) {
return -1;
}
tColorContext = avfilter_graph_alloc_filter(tFilterGraph, tColor, "color");
if (!tColorContext) {
return -1;
}
av_dict_set(&tOptionsDict, "color", "white", 0);
av_dict_set(&tOptionsDict, "size", "20x120", 0);
av_dict_set(&tOptionsDict, "framerate", "1/25", 0);
tError = avfilter_init_dict(tColorContext, &tOptionsDict);
av_dict_free(&tOptionsDict);
if (tError < 0) {
return tError;
}
}
{ // OVERLAY FILTER
tOverlay = avfilter_get_by_name("overlay");
if (!tOverlay) {
return -1;
}
tOverlayContext = avfilter_graph_alloc_filter(tFilterGraph, tOverlay, "overlay");
if (!tOverlayContext) {
return -1;
}
av_dict_set(&tOptionsDict, "x", "0", 0);
av_dict_set(&tOptionsDict, "y", "0", 0);
av_dict_set(&tOptionsDict, "main_w", "120", 0);
av_dict_set(&tOptionsDict, "main_h", "140", 0);
av_dict_set(&tOptionsDict, "overlay_w", "320", 0);
av_dict_set(&tOptionsDict, "overlay_h", "240", 0);
tError = avfilter_init_dict(tOverlayContext, &tOptionsDict);
av_dict_free(&tOptionsDict);
if (tError < 0) {
return tError;
}
}
{ // BUFFERSINK FILTER
tBufferSink = avfilter_get_by_name("buffersink");
if (!tBufferSink) {
return -1;
}
tBufferSinkContext = avfilter_graph_alloc_filter(tFilterGraph, tBufferSink, "sink");
if (!tBufferSinkContext) {
return -1;
}
tError = avfilter_init_str(tBufferSinkContext, NULL);
if (tError < 0) {
return tError;
}
}
// Linking graph
tError = avfilter_link(tBufferContext1, 0, tOverlayContext, 0);
if (tError >= 0) {
tError = avfilter_link(tColorContext, 0, tOverlayContext, 1);
}
if (tError >= 0) {
tError = avfilter_link(tOverlayContext, 0, tBufferSinkContext, 0);
}
if (tError < 0) {
return tError;
}
tError = avfilter_graph_config(tFilterGraph, NULL);
if (tError < 0) {
return tError;
}
*pGraph = tFilterGraph;
*pSrc1 = tBufferContext1;
*pSink = tBufferSinkContext;
return 0;
}

What do you think is the reason?
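For comparison, here is a minimal sketch (not part of the original post) that builds an equivalent graph from a filter description string with avfilter_graph_parse2, using the same option names as the code above. It assumes the color source is meant to run at 25 fps (framerate=25/1 rather than the 1/25 passed above) and omits main_w/main_h/overlay_w/overlay_h, which are variables for overlay's x/y expressions rather than init options:

#include <libavfilter/avfilter.h>
#include <libavfilter/avfiltergraph.h>

static int init_overlay_graph(AVFilterGraph **pGraph)
{
    AVFilterGraph *graph;
    AVFilterInOut *inputs = NULL, *outputs = NULL;
    int err;

    avfilter_register_all(); /* still required in 2014-era libav/FFmpeg */

    graph = avfilter_graph_alloc();
    if (!graph)
        return -1;

    /* Same topology as the hand-built graph above: a bgr24 buffer source
     * and a white color source feed overlay, which feeds a buffersink. */
    err = avfilter_graph_parse2(graph,
            "buffer=width=320:height=240:pix_fmt=bgr24:time_base=1/25:sar=1 [main];"
            "color=color=white:size=20x120:framerate=25/1 [top];"
            "[main][top] overlay=x=0:y=0 [out];"
            "[out] buffersink",
            &inputs, &outputs);
    if (err >= 0)
        err = avfilter_graph_config(graph, NULL);
    if (err < 0) {
        avfilter_graph_free(&graph);
        return err;
    }

    *pGraph = graph;
    return 0;
}

The parsed source and sink contexts can then be looked up with avfilter_graph_get_filter (parse2 names them in the Parsed_<filter>_<index> pattern, e.g. Parsed_buffer_0 and Parsed_buffersink_3), after which frames are pushed with av_buffersrc_write_frame and pulled with av_buffersink_get_frame.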