Newest 'ffmpeg' Questions - Stack Overflow

http://stackoverflow.com/questions/tagged/ffmpeg

Articles published on the site

  • How to convert any video format to mp4 format using FFmpeg API

    27 April 2017, by krish

    Using the FFmpeg API: ConvertLiveMediaTask

    ConvertLiveMedia(Stream inputStream, string inputFormat, Stream outputStream, string outputFormat, ConvertSettings settings);

    Here is my code. Requirement: convert a video file from any format to MP4, and the conversion should happen on the fly using the ConvertLiveMedia API.

    Please help me with this problem; I have googled many times but haven't found a solution.

    //Declarations part

    const int megabyte = 1024 * 1024;

    static string theFilename = @"F:\BackUp\Short video clip-nature.mp4";

    static void Main(string[] args)
    {
        ChunkedData(theFilename, 0);
    }

    private static void ChunkedData(string theFilename, long whereToStartReading = 0)
    {
        FileStream fileStream = new FileStream(theFilename, FileMode.Open, FileAccess.Read);
        using (fileStream)
        {
            byte[] buffer = new byte[megabyte];
            fileStream.Seek(whereToStartReading, SeekOrigin.Begin);
            int bytesRead = fileStream.Read(buffer, 0, megabyte);
            while (bytesRead > 0)
            {
                StreamData(buffer, bytesRead);
                bytesRead = fileStream.Read(buffer, 0, megabyte);
            }
        }
    }

    private static void StreamData(byte[] buffer, int bytesRead)
    {
        // I have no idea what to give for ConvertSettings, so I simply pass an object
        ConvertSettings cs = new ConvertSettings();

        var ffmpeg = new FFMpegConverter();

        Stream inputStream = new MemoryStream(buffer);
        Stream outputStream = new MemoryStream();

        try
        {
            // Here I'm getting outputStream capacity = 0, length = 0, position = 0.
            ffmpeg.ConvertLiveMedia(inputStream, Format.mp4, outputStream, Format.avi, cs);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    }
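
    For context, ConvertLiveMedia in NReco.VideoConverter (assuming that is the wrapper in use) drives the ffmpeg binary through piped input and output streams, so the on-the-fly "anything to MP4" conversion it performs corresponds roughly to the command sketched below. The paths, codecs, and the popen plumbing are illustrative assumptions rather than part of the question; the important detail is -movflags frag_keyframe+empty_moov, which lets the MP4 muxer write to a non-seekable pipe.

    // Minimal sketch (POSIX) of an on-the-fly conversion to fragmented MP4.
    // "input.avi" and "output.mp4" are placeholder paths, and ffmpeg is assumed
    // to be on PATH.
    #include <cstdio>
    #include <vector>

    int main() {
        // frag_keyframe+empty_moov allows the mp4 muxer to write to a pipe,
        // which otherwise fails because plain MP4 output must be seekable.
        FILE* pipe = popen(
            "ffmpeg -v error -i input.avi -c:v libx264 -c:a aac "
            "-movflags frag_keyframe+empty_moov -f mp4 pipe:1", "r");
        if (!pipe) return 1;

        std::vector<unsigned char> chunk(1024 * 1024);
        FILE* out = fopen("output.mp4", "wb");
        if (!out) return 1;

        size_t n;
        while ((n = fread(chunk.data(), 1, chunk.size(), pipe)) > 0)
            fwrite(chunk.data(), 1, n, out);   // the chunks could be forwarded anywhere

        fclose(out);
        return pclose(pipe) == 0 ? 0 : 1;
    }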
    
  • video compression error in android

    27 April 2017, by Arpan Sharma

    I am using ffmpeg for video compression in Android. I am following this link, but I always get exit code 1. I am trying to get the video from a VIDEO CAPTURE intent inside a fragment. This is my code:

    final Clip clip_in = new Clip(videoPath);
    File fileTmp = activity.getCacheDir();
    File fileAppRoot = new File(activity.getApplicationInfo().dataDir);

    final Clip clip_out = new Clip(videoPath);
    // put flags in clip
    clip_out.videoFps = "30";
    clip_out.width = 480;
    clip_out.height = 320;
    clip_out.videoCodec = "libx264";
    clip_out.audioCodec = "copy";

    try {
        FfmpegController fc = new FfmpegController(getContext(), fileTmp);
        fc.processVideo(clip_in, clip_out, false, new ShellUtils.ShellCallback() {

            @Override
            public void shellOut(String shellLine) {
                System.out.println("MIX> " + shellLine);
            }

            @Override
            public void processComplete(int exitValue) {
                if (exitValue != 0) {
                    // System.err.println("concat non-zero exit: " + exitValue);
                    Log.d("ffmpeg", "Compilation error. FFmpeg failed");
                    Toast.makeText(activity, "result: ffmpeg failed", Toast.LENGTH_LONG).show();
                } else {
                    if (new File("/storage/emulated/0/Developer/result2.mp4").exists()) {
                        Log.d("ffmpeg", "Success file:" + "/storage/emulated/0/Developer/result2.mp4");
                    }
                }
            }
        });

    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    

    Any help will be appreciated.
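
    For orientation, the Clip fields above correspond roughly to the ffmpeg invocation sketched below (the wrapper is presumably android-ffmpeg-java, and the paths here are placeholders; std::system is used only to keep the sketch self-contained). Comparing a command like this against the lines printed by shellOut() is one way to see which option the wrapper actually builds when it exits with code 1.

    // Hypothetical equivalent of the command the wrapper builds from the Clip
    // fields above; input and output paths are placeholders.
    #include <cstdlib>
    #include <string>

    int main() {
        std::string cmd =
            "ffmpeg -y -i /storage/emulated/0/DCIM/input.mp4 "
            "-r 30 -s 480x320 "          // videoFps, width x height
            "-c:v libx264 -c:a copy "    // videoCodec, audioCodec
            "/storage/emulated/0/Developer/result2.mp4";
        // A non-zero return here corresponds to processComplete(exitValue != 0).
        return std::system(cmd.c_str());
    }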

  • ffmpeg reconnect to rtmp output if error

    27 April 2017, by boygiandi

    I'm trying to livestream to Facebook. It works fine, but sometimes it gets an error and the stream stops:

    [ sh: 2017-04-27 10:31:34 ]size= 296042kB time=00:13:04.48 bitrate=3091.4kbits/s speed= 1x

    [ sh: 2017-04-27 10:31:35 ]size= 296605kB time=00:13:05.48 bitrate=3093.4kbits/s speed= 1x

    [ sh: 2017-04-27 10:31:36 ]size= 296928kB time=00:13:06.50 bitrate=3092.7kbits/s speed= 1x

    [ sh: 2017-04-27 10:31:37 ]size= 297259kB time=00:13:07.48 bitrate=3092.3kbits/s speed= 1x

    [flv @ 0x32c91e0] Failed to update header with correct duration.

    [flv @ 0x32c91e0] Failed to update header with correct filesize. frame=23623 fps= 30 q=13.0 Lsize= 297346kB time=00:13:07.52 bitrate=3093.0kbits/s speed= 1x

    video:284080kB audio:12242kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.345891%

    [libx264 @ 0x3294c80] frame I:394 Avg QP:10.66 size: 99956

    [libx264 @ 0x3294c80] frame P:23229 Avg QP:13.94 size: 10828

    [libx264 @ 0x3294c80] mb I I16..4: 100.0% 0.0% 0.0%

    [libx264 @ 0x3294c80] mb P I16..4: 2.8% 0.0% 0.0% P16..4: 34.9% 0.0% 0.0% 0.0% 0.0% skip:62.2%

    [libx264 @ 0x3294c80] coded y,uvDC,uvAC intra: 64.8% 69.3% 44.6% inter: 20.5% 16.6% 4.2%

    [libx264 @ 0x3294c80] i16 v,h,dc,p: 27% 51% 12% 10%

    [libx264 @ 0x3294c80] i8c dc,h,v,p: 34% 40% 16% 10%

    [libx264 @ 0x3294c80] kb/s:2955.27

    [aac @ 0x33825e0] Qavg: 1929.185

    I have no idea why it stopped, but is there any option to ignore the error and keep livestreaming? Or, alternatively, to re-stream again from the beginning? When I tried to do that by running the ffmpeg command again (after a few seconds), it said:

    [rtmp @ 0x3c01420] Server error: Initialization failed (2: Broadcast state is bad)

    rtmp://rtmp-api.facebook.com:80/rtmp/1658103677537416?ds=1&s_l=1&a=ATjLWmaYE8qulMzm: Operation not permitted

    I can't stream to the Facebook RTMP URL again. Please help.
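
    One common workaround is to wrap the ffmpeg push in a small supervisor that relaunches it whenever it exits with an error; the "Broadcast state is bad" response usually means the previous session has not been torn down yet, or that the broadcast has ended and a fresh stream key is needed. Below is a minimal sketch of that idea; the input source, the encoding options and the STREAM_KEY placeholder are assumptions, and it restarts from the beginning rather than resuming.

    // Minimal supervisor sketch: rerun the same ffmpeg push when it exits with
    // an error. Input, encoding options and STREAM_KEY are placeholders, and it
    // assumes the source can simply be picked up again from the start.
    #include <cstdlib>
    #include <thread>
    #include <chrono>

    int main() {
        const char* cmd =
            "ffmpeg -re -i input_source "
            "-c:v libx264 -c:a aac -f flv "
            "\"rtmp://rtmp-api.facebook.com:80/rtmp/STREAM_KEY\"";

        while (true) {
            int status = std::system(cmd);
            if (status == 0)
                break;                               // clean end of stream
            // Give the remote side time to drop the broken session before
            // reconnecting; otherwise "Broadcast state is bad" tends to recur.
            std::this_thread::sleep_for(std::chrono::seconds(10));
        }
        return 0;
    }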

  • FFmpeg SwrContext incorrectly converting leftover data after seek

    27 April 2017, by trigger_death

    I currently have my own custom SFML SoundFileReader that uses FFmpeg to support more file formats. It works great for the most part, but after a seek, swr_convert returns leftover data from the previous location. I currently have a hackish (I think) solution where I call swr_init after seeking to remove whatever data is left in there. I assumed that the flushing behaviour described in swr_convert's documentation would solve the issue, yet either it doesn't help or I'm not doing it correctly. Is there a proper way to clear the leftover data in the SwrContext after seeking?

    void seekBeginning() {
        av_seek_frame(
            m_formatContext, m_audioStream,
            m_formatContext->streams[m_audioStream]->first_dts,
            AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY
        );
        avcodec_flush_buffers(m_codecContext);
    
        // This fixes the issue but it seems like a horribly incorrect way of doing it
        swr_init(m_convertContext);
    
        // I've tried this but it doesn't seem to work
        //swr_convert(m_convertContext, NULL, 0, NULL, 0);
    }
    
    Uint64 read(Int16* samples, Uint64 maxCount) {
        Uint64 count = 0;
        while (count < maxCount) {
            if (m_packet->stream_index == m_audioStream) {
                while (m_packet->size > 0) {
                    int gotFrame = 0;
                    int result = avcodec_decode_audio4(m_codecContext, m_frame, &gotFrame, m_packet);
                    if (result >= 0 && gotFrame) {
                        int samplesToRead = static_cast<int>(maxCount - count) / m_codecContext->channels;
                        if (samplesToRead > m_frame->nb_samples)
                            samplesToRead = m_frame->nb_samples;
                        m_packet->size -= result;
                        m_packet->data += result;
                        result = swr_convert(m_convertContext, (uint8_t**)&samples, samplesToRead, (const uint8_t**)m_frame->data, m_frame->nb_samples);
    
                        if (result > 0) {
                            count += result * m_codecContext->channels;
                            samples += result * m_codecContext->channels;
                        }
                        else {
                            m_packet->size = 0;
                            m_packet->data = NULL;
                        }
                    }
                    else {
                        m_packet->size = 0;
                        m_packet->data = NULL;
                    }
                }
            }
            av_free_packet(m_packet);
        }
    
        return count;
    }
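
    For what it's worth, libswresample can also be drained explicitly: calling swr_convert() with a NULL input flushes whatever is still buffered, and after a seek that output can simply be discarded. A minimal sketch of that idea follows; it assumes interleaved 16-bit output and that the context was configured for the stream's output sample rate, and whether this is really more correct than re-running swr_init() is debatable.

    // Sketch: discard whatever libswresample still has buffered after a seek.
    // Assumes packed/interleaved 16-bit output; "swr" is the reader's
    // m_convertContext and "outSampleRate" the output rate it was set up with.
    extern "C" {
    #include <libswresample/swresample.h>
    }
    #include <cstdint>

    void flushResampler(SwrContext* swr, int outSampleRate) {
        int16_t scratch[4096];
        uint8_t* out[1] = { reinterpret_cast<uint8_t*>(scratch) };

        // swr_get_delay() reports how many samples are still queued inside swr
        // (measured here in output samples). A NULL input tells swr_convert()
        // to flush those samples instead of converting new ones.
        while (swr_get_delay(swr, outSampleRate) > 0) {
            int got = swr_convert(swr, out, 2048 /* samples per channel */,
                                  nullptr, 0);
            if (got <= 0)
                break;    // nothing left (or an error); stop either way
        }
    }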
    
  • Couldn't capture the input video on hadoop

    27 April 2017, by 彭思倩

    I tried to use JNI by packaging C++ code based on OpenCV and FFmpeg into a .so file and including that .so file from the Java code.

    The C++ code:

    #include "DenseTrackStab.h"
    #include "Initialize.h"
    #include "Descriptors.h"
    #include "OpticalFlow.h"
    #include 
    #include "JNItest2.h"
    
    #include 
    
    using namespace cv;
    
    int show_track = 0; // set show_track = 1, if you want to visualize the         trajectories
    int my_i = 1;
    
    JNIEXPORT void JNICALL
    Java_JNItest2_helloworld(JNIEnv *env,jobject object, jobjectArray args)
    {
        int argc = env->GetArrayLength(args);   
            char* argv[argc];
        printf("%d\n",argc );
         /* convert String[] to const char**    */
        for(int i = 0; i < argc; i++) {
    
            jstring string = (jstring) env->GetObjectArrayElement(args, i);
            int slength = env->GetStringLength(string);
            char *wchars = (char *)env->GetStringUTFChars(string,NULL);
            argv[i] = wchars;
        }
    
        VideoCapture capture;
        char* video = argv[0];
        int flag = arg_parse(argc, argv);
        capture.open(video);
    
        if(!capture.isOpened()) {
            fprintf(stderr, "Could not initialize capturing..\n");
            return;
        }
    }
    

    The Java code:

    public class JNItest2 {

        public native void helloworld(String[] args);

        static {
            System.loadLibrary("Dense");
        }

        public static void main(String[] args)
        {
            new JNItest2().helloworld(args);
        }
    }
    

    Then I exported the Java code into an executable jar file and tried to execute that jar on Hadoop. I put the input video on Hadoop and ran the jar, but the program couldn't capture the input video. The Hadoop command is:

    hadoop jar /home/604410015/package/JNItest2/src/JNItest2.jar/user/604410015/src01_hrc01_ip64_qi32_qp32_qb32_hier0_s1920x1080p25n400v0.avi  |gzip > out.features
    

    Then, the output message is

    Could not initialize capturing..
    

    I think the jar file was executed, but I don't know why I cannot capture the video. Can anyone help me? Thank you.
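
    One thing worth checking on the native side: cv::VideoCapture can only open files reachable through the local filesystem (or a URL protocol FFmpeg understands), so an HDFS path such as /user/604410015/... will not open unless the video is first copied out of HDFS (for example with hdfs dfs -get). The helper below is a hypothetical sketch of that check, not part of the original program.

    // Hypothetical check before capture.open(): confirm the path handed to the
    // native code is a readable *local* file, since VideoCapture has no HDFS
    // backend. If it is not, copy the video to local storage first
    // (e.g. "hdfs dfs -get <hdfs path> <local path>").
    #include <cstdio>
    #include <fstream>
    #include <opencv2/highgui/highgui.hpp>

    static bool openLocalVideo(const char* path, cv::VideoCapture& capture) {
        std::ifstream probe(path, std::ios::binary);
        if (!probe.good()) {
            std::fprintf(stderr, "Not a readable local file: %s\n", path);
            return false;
        }
        capture.open(path);
        return capture.isOpened();
    }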