
Other articles (111)
-
The SPIPmotion queue
28 November 2010 - A queue stored in the database
When it is installed, SPIPmotion creates a new table in the database called spip_spipmotion_attentes.
This new table is made up of the following fields: id_spipmotion_attente, the unique numeric identifier of the task to process; id_document, the numeric identifier of the original document to encode; id_objet, the unique identifier of the object to which the encoded document is to be attached automatically; objet, the type of object to which (...) -
Contribute to documentation
13 April 2011 - Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation by users as well as developers, including: critique of existing features and functions; articles contributed by developers, administrators, content producers and editors; screenshots to illustrate the above; translations of existing documentation into other languages
To contribute, register to the project users’ mailing (...) -
Adding user-specific information and other changes to author-related behaviour
12 April 2011 - The simplest way to add information to authors is to install the Inscription3 plugin. It also makes it possible to change certain user-related behaviours (refer to its documentation for more information).
It is also possible to add fields to authors by installing the champs extras 2 and Interface pour champs extras plugins.
On other sites (7223)
-
Segfault after 15 seconds while reading an RTSP stream with ffmpeg in Android
27 July 2013, by ABentSpoon - I'm trying to read an RTSP stream using ffmpeg by way of javacv. I'm able to view the first 15 seconds or so before I get a segfault on the call to sws_scale. Does this mean I've run out of memory? If so, any idea what I'm doing wrong?
av_register_all();
avcodec.avcodec_register_all();
avformat_network_init();
AVFormatContext avFormatContext = avformat.avformat_alloc_context();
if(0 != avformat_open_input(avFormatContext, "rtsp://192.168.0.107:7654/lov2.ffm", null, null)){
throw new RuntimeException("avformat_open_input failed");
}
if(avformat_find_stream_info(avFormatContext, null) < 0){
throw new RuntimeException("avformat_find_stream_info failed");
}
int video_stream_index = 0;
//search video stream
for(int i = 0; i < avFormatContext.nb_streams(); i++){
if(avFormatContext.streams(i).codec().codec_type() == AVMEDIA_TYPE_VIDEO){
video_stream_index = i;
}
}
Log.d(TAG, "checkpoint 3");
AVPacket packet = new AVPacket();
av_init_packet(packet);
AVStream stream = new AVStream();
int cnt = 0;
//start reading packets from stream and write them to file
av_read_play(avFormatContext);//play RTSP
AVCodecContext avCodecContext = avFormatContext.streams(video_stream_index).codec();
AVCodec codec = avcodec_find_decoder(avCodecContext.codec_id());
if (codec == null){
throw new RuntimeException("codec not found");
}
if (avcodec_open2(avCodecContext, codec, null) < 0){
throw new RuntimeException("could not open codec");
}
SwsContext img_convert_ctx = new SwsContext();
img_convert_ctx = swscale.sws_getCachedContext(
img_convert_ctx,
avCodecContext.width(), avCodecContext.height(),
avCodecContext.pix_fmt(), avCodecContext.width(), avCodecContext.height(),
PIX_FMT_RGB24, SWS_BICUBIC, null, null, null
);
int size = avpicture_get_size(PIX_FMT_YUV420P, avCodecContext.width(), avCodecContext.height());
BytePointer picture_buf = new BytePointer(av_malloc(size));
AVFrame frame = avcodec_alloc_frame();
AVFrame picrgb = avcodec_alloc_frame();
int size2 = avpicture_get_size(avutil.PIX_FMT_RGB24, avCodecContext.width(), avCodecContext.height());
BytePointer picture_buf2 = new BytePointer(av_malloc(size2));
avpicture_fill(frame, picture_buf, PIX_FMT_YUV420P, avCodecContext.width(), avCodecContext.height());
avpicture_fill(picrgb, picture_buf2, avutil.PIX_FMT_RGB24, avCodecContext.width(), avCodecContext.height());
Log.d(TAG, String.format("h: %d, w: %d; h: %d w: %d", avCodecContext.width(), avCodecContext.height(), avCodecContext.height(), frame.width()));
Log.d(TAG, "checkpoint 6");
Bitmap bmp = Bitmap.createBitmap(avCodecContext.width(), avCodecContext.height(), Bitmap.Config.RGB_565);
while(av_read_frame(avFormatContext, packet) >= 0){
if(packet.size() > 0 && packet.stream_index() == video_stream_index){//packet is video
int[] gotFrame = new int[]{0};
int len = avcodec_decode_video2(avCodecContext, frame, gotFrame, packet);
if(gotFrame[0] > 0){
swscale.sws_scale(
img_convert_ctx,
frame.data(0), frame.linesize(),
0, avCodecContext.height(),
picrgb.data(0), picrgb.linesize());
bmp.copyPixelsFromBuffer(picture_buf2.asByteBuffer());
videoView.drawFrame(bmp);
}
}
av_free_packet(packet);
av_init_packet(packet);
}
av_free(frame);
av_free(picrgb);
av_free(picture_buf);
av_free(picture_buf2);
av_read_pause(avFormatContext);

Stack trace:
F/libc ( 7460): Fatal signal 11 (SIGSEGV) at 0x76100005 (code=1), thread 7474 (Thread-13601)
E/Sensors ( 720): accelHandler 0.201182 0.180824 10.950029
I/DEBUG ( 6409): *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
I/DEBUG ( 6409): Build fingerprint: 'Verizon/d2vzw/d2vzw:4.1.2/JZO54K/I535VRBMB1:user/release-keys'
I/DEBUG ( 6409): pid: 7460, tid: 7474, name: Thread-13601 >>> com.example.javacv.stream.test2 <<<
I/DEBUG ( 6409): signal 11 (SIGSEGV), code 1 (SEGV_MAPERR), fault addr 76100005
I/DEBUG ( 6409): r0 57995360 r1 594f3c30 r2 59349200 r3 59349020
I/DEBUG ( 6409): r4 59349020 r5 76100005 r6 00000060 r7 594f3cf0
I/DEBUG ( 6409): r8 59349200 r9 59349020 sl 000001e0 fp 76100005
I/DEBUG ( 6409): ip 594f3cf0 sp 5e08bb08 lr 59308bec pc 593103b8 cpsr 00000010

FAQ:
- Why not use MediaPlayer?
MediaPlayer forces 2 seconds of latency on the stream; I really need <100 ms.
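One thing that might be worth checking, sketched below with the same JavaCV bindings the question already uses: rebuilding the cached SwsContext from the frame that was actually decoded, rather than from the codec context's initial values. If the stream's dimensions or pixel format change mid-stream, a stale conversion context can make sws_scale read past the end of the source planes. This is a hedged suggestion, not a confirmed diagnosis of the crash; the fragment below stands in for the body of the decode loop shown above.

if(gotFrame[0] > 0){
    // refresh the conversion context from the decoded frame's real geometry
    img_convert_ctx = swscale.sws_getCachedContext(
        img_convert_ctx,
        frame.width(), frame.height(), frame.format(),   // source: what was actually decoded
        avCodecContext.width(), avCodecContext.height(),  // destination geometry unchanged
        PIX_FMT_RGB24, SWS_BICUBIC, null, null, null);
    swscale.sws_scale(
        img_convert_ctx,
        frame.data(0), frame.linesize(),
        0, frame.height(),
        picrgb.data(0), picrgb.linesize());
}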
-
changing frame-rate for .mov
5 June 2013, by Ryan Saxe - So I have a Python script that uses urllib to download a bunch of images, and I then use ImageMagick to add labels to the images and throw them into a MOV file. My issue is that the MOV file plays at 30 frames per second, which is way too fast for me to watch what is happening. I need something more like 5-10 frames per second.
Is there a way to do this in ImageMagick, FFmpeg or any other easily accessible and cross-platform package?
EDIT 1: showing my ImageMagick command
convert -quality 100 *.png my_movie.mov
I wasn't able to find it, but is there some option I can add, like -frameRate or something like that, to let me choose the frame rate? I can also use FFmpeg along with it, so if anybody knows, that would be great.
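Since the question allows for any other easily accessible, cross-platform package, one option is the JavaCV FFmpegFrameRecorder used elsewhere on this page: its setFrameRate() call sets the rate at which still images are assembled into the movie. The sketch below is illustrative only; the directory name, image size and codec are assumptions rather than details from the question. (On the ImageMagick side, convert's -delay option, given in hundredths of a second and placed before the input images, is the comparable knob for the command shown above.)

import java.io.File;
import java.util.Arrays;
import javax.imageio.ImageIO;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Java2DFrameConverter;

public class SlowMovie {
    public static void main(String[] args) throws Exception {
        // Assumed layout: the labelled PNGs sit in ./frames and share one size.
        int width = 640, height = 480;
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("my_movie.mov", width, height);
        recorder.setFormat("mov");
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
        recorder.setFrameRate(5);                 // 5 frames per second instead of 30
        recorder.start();

        Java2DFrameConverter converter = new Java2DFrameConverter();
        File[] images = new File("frames").listFiles();
        Arrays.sort(images);                      // keep the frames in name order
        for (File image : images) {
            recorder.record(converter.convert(ImageIO.read(image)));
        }
        recorder.stop();
        recorder.release();
    }
}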
-
I am using ffmpeg java library to convert captured screenshots to video. Video output is blurry
2 October 2020, by dark prince - I am using the ffmpeg Java library to convert captured screenshots to video. The video generated as output is blurry.


I am using a bit rate of 9000, 25 frames per second, and a video size equal to the desktop screen size.

Any suggestions on how to solve this issue?

P.S. I cannot use ffmpeg.exe and the command line due to certain restrictions, hence I am opting for the ffmpeg Java library.

Any suggestions on the issue, or on a better approach, would be helpful.
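One detail in the settings above may deserve a second look, offered as a hedged observation rather than a confirmed cause of the blur: FFmpegFrameRecorder's setVideoBitrate() is interpreted in bits per second, so a value of 9000 is extremely low for a 1920x1080, 25 fps capture and forces very heavy compression. A minimal configuration sketch with illustrative values follows; the numbers are assumptions, not taken from the code below.

import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacv.FFmpegFrameRecorder;

public class RecorderConfigSketch {
    // Illustrative values only: roughly 4 Mbit/s is a more typical order of
    // magnitude for 1080p screen capture than 9000 bits per second.
    static FFmpegFrameRecorder configure() throws Exception {
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("recording.mp4", 1920, 1080);
        recorder.setFormat("mp4");
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
        recorder.setFrameRate(25);
        recorder.setVideoBitrate(4_000_000);
        recorder.setVideoQuality(0);   // the question's own code treats 0 as maximum quality
        recorder.start();
        return recorder;
    }
}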


import java.awt.AWTException;
 import java.awt.Dimension;
 import java.awt.FlowLayout;
 import java.awt.Rectangle;
 import java.awt.Robot;
 import java.awt.Toolkit;
 import java.awt.event.ActionEvent;
 import java.awt.event.ActionListener;
 import java.awt.image.BufferedImage;
 import java.io.File;
 import java.io.IOException;
 import java.util.Date;
 
 import javax.imageio.ImageIO;
 import javax.swing.JButton;
 import javax.swing.JFrame;
 import javax.swing.JLabel;
 import javax.swing.JOptionPane;
 
 import org.bytedeco.javacpp.avcodec;
 import org.bytedeco.javacv.FFmpegFrameRecorder;
 import org.bytedeco.javacv.OpenCVFrameConverter;
 // cvLoadImage is used in addImageToVideo(); its package varies by JavaCV version
 // (org.bytedeco.javacpp.opencv_imgcodecs in these presets, opencv_highgui in older ones)
 import static org.bytedeco.javacpp.opencv_imgcodecs.cvLoadImage;
 
 public class ScreenRecorder{
 
 public static boolean videoComplete=false;
 public static String inputImageDir="inputImgFolder"+File.separator;
 public static String inputImgExt="png";
 public static String outputVideo="recording.mp4"; 
 public static int counter=0;
 public static int imgProcessed=0;
 public static FFmpegFrameRecorder recorder=null;
 public static int videoWidth=1920;
 public static int videoHeight=1080;
 public static int videoFrameRate=3;
 public static int videoQuality=0; // 0 is the max quality
 public static int videoBitRate=9000;
 public static String videoFormat="mp4";
 public static int videoCodec=avcodec.AV_CODEC_ID_MPEG4;
 public static Thread t1=null;
 public static Thread t2=null;
 public static JFrame frame=null;
 public static boolean isRegionSelected=false;
 public static int c1=0;
 public static int c2=0;
 public static int c3=0;
 public static int c4=0;
 
 
 public static void main(String[] args) {
 
 try {
 if(getRecorder()==null)
 {
 System.out.println("Cannot make recorder object, Exiting program");
 System.exit(0);
 }
 if(getRobot()==null)
 {
 System.out.println("Cannot make robot object, Exiting program");
 System.exit(0);
 }
 File scanFolder=new File(inputImageDir);
 scanFolder.delete();
 scanFolder.mkdirs();
 
 createGUI();
 } catch (Exception e) {
 System.out.println("Exception in program "+e.getMessage());
 }
 }
 
 public static void createGUI()
 {
 frame=new JFrame("Screen Recorder");
 JButton b1=new JButton("Select Region for Recording");
 JButton b2=new JButton("Start Recording");
 JButton b3=new JButton("Stop Recording");
 JLabel l1=new JLabel("<br />If you dont select a region then full screen recording <br /> will be made when you click on Start Recording");
 l1.setFont (l1.getFont ().deriveFont (20.0f));
 b1.addActionListener(new ActionListener() {
 @Override
 public void actionPerformed(ActionEvent e) {
 try {
 JOptionPane.showMessageDialog(frame, "A new window will open. Use your mouse to select the region you like to record");
 new CropRegion().getImage();
 } catch (Exception e1) {
 // TODO Auto-generated catch block
 System.out.println("Issue while trying to call the module to crop region");
 e1.printStackTrace();
 } 
 }
 });
 b2.addActionListener(new ActionListener() {
 @Override
 public void actionPerformed(ActionEvent e) {
 counter=0;
 startRecording();
 }
 });
 b3.addActionListener(new ActionListener() {
 @Override
 public void actionPerformed(ActionEvent e) {
 stopRecording();
 System.out.print("Exiting...");
 System.exit(0);
 }
 });
 
 frame.add(b1);
 frame.add(b2);
 frame.add(b3);
 frame.add(l1);
 frame.setLayout(new FlowLayout(0));
 frame.setVisible(true);
 frame.setSize(1000, 170);
 frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
 }
 
 public static void startRecording()
 {
 t1=new Thread()
 {
 public void run() {
 try {
 takeScreenshot(getRobot());
 } catch (Exception e) {
 JOptionPane.showMessageDialog(frame, "Cannot make robot object, Exiting program "+e.getMessage());
 System.out.println("Cannot make robot object, Exiting program "+e.getMessage());
 System.exit(0);
 }
 }
 };
 
 t2=new Thread()
 {
 public void run() {
 prepareVideo();
 }
 };
 
 t1.start();
 t2.start();
 System.out.println("Started recording at "+new Date());
 }
 
 public static Robot getRobot() throws Exception
 {
 Robot r=null;
 try {
 r = new Robot();
 return r;
 } catch (AWTException e) {
 JOptionPane.showMessageDialog(frame, "Issue while initiating Robot object "+e.getMessage());
 System.out.println("Issue while initiating Robot object "+e.getMessage());
 throw new Exception("Issue while initiating Robot object");
 }
 }
 
 public static void takeScreenshot(Robot r)
 {
 Dimension size = Toolkit.getDefaultToolkit().getScreenSize();
 Rectangle rec=new Rectangle(size);
 if(isRegionSelected)
 {
 rec=new Rectangle(c1, c2, c3-c1, c4-c2);
 }
 while(!videoComplete)
 {
 counter++;
 BufferedImage img = r.createScreenCapture(rec);
 try {
 ImageIO.write(img, inputImgExt, new File(inputImageDir+counter+"."+inputImgExt));
 } catch (IOException e) {
 JOptionPane.showMessageDialog(frame, "Got an issue while writing the screenshot to disk "+e.getMessage());
 System.out.println("Got an issue while writing the screenshot to disk "+e.getMessage());
 counter--;
 }
 }
 }
 
 public static void prepareVideo()
 {
 File scanFolder=new File(inputImageDir);
 while(!videoComplete)
 {
 File[] inputFiles=scanFolder.listFiles();
 try {
 getRobot().delay(500);
 } catch (Exception e) {
 }
 // add every screenshot written so far to the video, then remove it from disk
 for(int i=0;i<inputFiles.length;i++)
 {
 //imgProcessed++;
 addImageToVideo(inputFiles[i].getAbsolutePath());
 //String imgToAdd=scanFolder.getAbsolutePath()+File.separator+imgProcessed+"."+inputImgExt;
 //addImageToVideo(imgToAdd);
 //new File(imgToAdd).delete();
 inputFiles[i].delete();
 }
 }
 
 // flush whatever is still on disk once recording has been stopped
 File[] inputFiles=scanFolder.listFiles();
 for(int i=0;i<inputFiles.length;i++)
 {
 addImageToVideo(inputFiles[i].getAbsolutePath());
 inputFiles[i].delete();
 }
 }
 
 public static FFmpegFrameRecorder getRecorder() throws Exception
 {
 // lazily create and configure a single shared recorder
 if(recorder!=null)
 {
 return recorder;
 }
 recorder=new FFmpegFrameRecorder(outputVideo, videoWidth, videoHeight);
 try
 {
 recorder.setFrameRate(videoFrameRate);
 recorder.setVideoCodec(videoCodec);
 recorder.setFormat(videoFormat);
 recorder.setVideoBitrate(videoBitRate);
 recorder.setVideoQuality(videoQuality); // 0 is the maximum quality
 recorder.start();
 }
 catch(Exception e)
 {
 JOptionPane.showMessageDialog(frame, "Exception while starting the recorder object "+e.getMessage());
 System.out.println("Exception while starting the recorder object "+e.getMessage());
 throw new Exception("Unable to start recorder");
 }
 return recorder;
 }
 
 public static OpenCVFrameConverter.ToIplImage getFrameConverter()
 {
 OpenCVFrameConverter.ToIplImage grabberConverter = new OpenCVFrameConverter.ToIplImage();
 return grabberConverter;
 }
 
 public static void addImageToVideo(String imgPath)
 {
 try {
 getRecorder().record(getFrameConverter().convert(cvLoadImage(imgPath)));
 } catch (Exception e) {
 JOptionPane.showMessageDialog(frame, "Exception while adding image to video "+e.getMessage());
 System.out.println("Exception while adding image to video "+e.getMessage());
 }
 }
 
 public static void stopRecording()
 {
 try {
 videoComplete=true;
 System.out.println("Stopping recording at "+new Date());
 t1.join();
 System.out.println("Screenshot thread complete");
 t2.join();
 System.out.println("Video maker thread complete");
 getRecorder().stop();
 System.out.println("Recording has been saved successfully at "+new File(outputVideo).getAbsolutePath());
 JOptionPane.showMessageDialog(frame, "Recording has been saved successfully at "+new File(outputVideo).getAbsolutePath());
 } catch (Exception e) {
 System.out.println("Exception while stopping the recorder "+e.getMessage());
 }
 }
 }



ImagePanel.java


import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Image;
import javax.swing.ImageIcon;
import javax.swing.JPanel;

class ImagePanel
 extends JPanel
{
 private Image img;
 
 public ImagePanel(String img)
 {
 this(new ImageIcon(img).getImage());
 }
 
 public ImagePanel(Image img)
 {
 this.img = img;
 Dimension size = new Dimension(img.getWidth(null), img.getHeight(null));
 
 setPreferredSize(size);
 setMinimumSize(size);
 setMaximumSize(size);
 setSize(size);
 setLayout(null);
 }
 
 public void paintComponent(Graphics g)
 {
 g.drawImage(this.img, 0, 0, null);
 }
}



CropRegion.java


import java.awt.AWTException;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.image.BufferedImage;
import java.io.IOException;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;


public class CropRegion implements MouseListener,
 MouseMotionListener {

 int drag_status = 0;
 int c1;
 int c2;
 int c3;
 int c4;
 JFrame frame=null;
 static int counter=0;
 JLabel background=null;

 
 public void getImage() throws AWTException, IOException, InterruptedException {
 Dimension size = Toolkit.getDefaultToolkit().getScreenSize();
 Robot robot = new Robot();
 BufferedImage img = robot.createScreenCapture(new Rectangle(size));
 ImagePanel panel = new ImagePanel(img);
 frame=new JFrame();
 frame.add(panel);
 frame.setLocation(0, 0);
 frame.setSize(size);
 frame.setLayout(new FlowLayout());
 frame.setUndecorated(true);
 frame.setVisible(true);
 frame.addMouseListener(this);
 frame.addMouseMotionListener(this);
 frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
 }

 public void draggedScreen() throws Exception {
 ScreenRecorder.c1=c1;
 ScreenRecorder.c2=c2;
 ScreenRecorder.c3=c3;
 ScreenRecorder.c4=c4;
 ScreenRecorder.isRegionSelected=true;
 JOptionPane.showMessageDialog(frame, "Region Selected.Please click on Start Recording button to record the selected region.");
 frame.dispose();
 }

 public void mouseClicked(MouseEvent arg0) {
 }

 public void mouseEntered(MouseEvent arg0) {
 }

 public void mouseExited(MouseEvent arg0) {
 }

 public void mousePressed(MouseEvent arg0) {
 paint();
 this.c1 = arg0.getX();
 this.c2 = arg0.getY();
 }

 public void mouseReleased(MouseEvent arg0) {
 paint();
 if (this.drag_status == 1) {
 this.c3 = arg0.getX();
 this.c4 = arg0.getY();
 try {
 draggedScreen();
 } catch (Exception e) {
 e.printStackTrace();
 }
 }
 }

 public void mouseDragged(MouseEvent arg0) {
 paint();
 this.drag_status = 1;
 this.c3 = arg0.getX();
 this.c4 = arg0.getY();
 }

 public void mouseMoved(MouseEvent arg0) {
 }

 public void paint() {
 Graphics g = frame.getGraphics();
 frame.repaint();
 int w = this.c1 - this.c3;
 int h = this.c2 - this.c4;
 w *= -1;
 h *= -1;
 if (w < 0) {
 w *= -1;
 }
 g.drawRect(this.c1, this.c2, w, h);
 }
}