Newest 'ffmpeg' Questions - Stack Overflow
-
Change youtube-dl download output (Windows)
13 April 2016, by Inforcer25
I hope you can help me.
I am using youtube-dl on Windows (youtube-dl.exe). Downloading the video works great, and so does downloading just the audio. But what I want is for it to save the audio file in a different place, e.g. C:\Users\*******\Desktop.
I made a batch file with this code:
:audio
cls
echo.
echo.
echo Your audio will be downloaded and saved as a .mp3 format
echo.
echo.
set /p audio=Enter Video URL here:
cls
youtube-dl.exe --extract-audio --audio-format mp3 --output C:\Users\*******\Desktop\(ext)s.%(ext)s %audio%
pause
cls
echo.
echo.
echo.
echo.
echo Your audio has now been downloaded.
ping localhost -n 3 >nul
exit
and then it gives me this:
Usage: youtube-dl.exe [OPTIONS] URL [URL...]
youtube-dl.exe: error: You must provide at least one URL.
Type youtube-dl --help to see a list of all options.
Press any key to continue . . .
It works fine if I use the following, but then it saves the file in the same folder:
:audio
cls
echo.
echo.
echo Your audio will be downloaded and saved as a .mp3 format
echo.
echo.
set /p audio=Enter Video URL here:
cls
youtube-dl.exe --extract-audio --audio-format mp3 %audio%
pause
cls
echo.
echo.
echo.
echo.
echo Your audio has now been downloaded.
ping localhost -n 3 >nul
exit
Also, please keep in mind that it uses ffprobe.exe and ffmpeg.exe (they are both in the same folder as youtube-dl.exe).
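A likely explanation (my guess, not stated in the original question): inside a .bat file, cmd.exe treats % as the start of a variable reference, so the %(ext)s output template is mangled before youtube-dl ever sees it, and the URL argument gets swallowed along with it, hence the "You must provide at least one URL" error. youtube-dl's documentation recommends doubling percent signs inside batch files. A minimal sketch of the corrected download line (the quotes and the %(title)s.%(ext)s template are my substitutions):

rem Double the %% signs so cmd.exe passes the output template through literally
youtube-dl.exe --extract-audio --audio-format mp3 --output "C:\Users\*******\Desktop\%%(title)s.%%(ext)s" %audio%

Note that %audio% keeps its single percent signs, since that one really is a batch variable expansion.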
-
Use ffmpeg to convert mp4 to HLS m3u8 and keep only the audio
13 April 2016, by user3678248
I have an mp4 file, input0.mp4. I want to convert it to HLS and keep only the audio, in a single step. At the moment I can do it in two steps: first convert it to an mp3 file, then slice the mp3 into an m3u8 playlist.
ffmpeg -i input0.mp4 input0.mp3
ffmpeg -i input0.mp3 -c:v libx264 -c:a aac -strict -2 -f hls -hls_list_size 0 output.m3u8
How can I do the same thing with ffmpeg in one step?
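A plausible one-step command (an untested sketch): drop the video stream with -vn and feed the audio straight to the HLS muxer, so no intermediate mp3 file is needed:

ffmpeg -i input0.mp4 -vn -c:a aac -strict -2 -f hls -hls_list_size 0 output.m3u8

With -vn disabling video entirely, the -c:v libx264 option from the two-step version is no longer needed.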
-
FFMPEG Code Understanding
13 April 2016, by Edge
I have the following code:
}
private function getIntroVideoFile() {
    global $upload_dir;
    foreach ($this->myusers as $user) {
        $sql = "SELECT IntroVideoFile from IntroVideos
                where IntroVideoID in (select IntroVideoID from EventInfo
                                       where EventInfo.PackageID = '{$user->packageid}'
                                       and EventInfo.SlotID = '{$this->curSlotId}') ";
        $row = db_fetch_item($sql);
        $user->introVideoFile = $row['IntroVideoFile'];
        exec('ffmpeg -re -i "'.$upload_dir.$user->introVideoFile.'" -sameq -vcodec copy -acodec copy -f flv rtmp://XX.XX.XX.XXX:1935/live/'.$user->username
            . ' >/dev/null 2>/dev/null < /dev/null &');
        echo "Starting play ".$user->introVideoFile."\n";
    }
Can someone please explain to me what is happening here?
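A reading of the command that exec() launches, with annotations that are mine rather than from the original post:

# ffmpeg -re -i "<upload_dir><introVideoFile>" -sameq -vcodec copy -acodec copy \
#        -f flv rtmp://XX.XX.XX.XXX:1935/live/<username> >/dev/null 2>/dev/null < /dev/null &
#
# -re              read the input at its native frame rate, i.e. play the file out in real time
# -i "..."         the intro video path looked up from the IntroVideos table
# -vcodec copy     pass the video stream through without re-encoding
# -acodec copy     pass the audio stream through without re-encoding
# -sameq           a deprecated "same quantizer" flag; effectively a no-op here since both streams are copied
# -f flv           wrap the streams in an FLV container, the packaging that RTMP transports
# rtmp://...       publish to the RTMP server as a live stream named after the user
# redirects and &  detach ffmpeg into the background so PHP's exec() returns immediately

In short: for each user, the function looks up that user's intro video in the database and pushes it, unmodified and in real time, to an RTMP server as a live stream named after the user.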
-
Can't we use mp4 as the format of FFmpegFrameRecorder while RTMP streaming?
13 April 2016, by Saty
I am trying to stream video with FFmpegFrameRecorder using javacv. Everything works; however, I want to use "mp4" as the recording format instead of FLV. Is that possible? When I set it, the app crashes.
I am using the code below, which half of the internet community uses to live-stream:
public class MainActivity extends Activity implements OnClickListener {
    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "rtmp://username:password@xxx.xxx.xxx.xxx:1935/live/test.flv";
    //private String ffmpeg_link = "/mnt/sdcard/new_stream.flv";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_main);
        initLayout();
        initRecorder();
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mWakeLock == null) {
            PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
            mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
            mWakeLock.acquire();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        recording = false;
    }

    private void initLayout() {
        mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);
        recordButton = (Button) findViewById(R.id.recorder_control);
        recordButton.setText("Start");
        recordButton.setOnClickListener(this);
        cameraView = new CameraView(this);
        LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(imageWidth, imageHeight);
        mainLayout.addView(cameraView, layoutParam);
        Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");
        if (yuvIplimage == null) {
            // Recreated after frame size is set in surface change method
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
            Log.v(LOG_TAG, "IplImage.create");
        }
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);
        recorder.setFormat("flv");
        Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");
        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;
        if (recorder != null && recording) {
            recording = false;
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Quit when back button is pushed
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (recording) {
                stopRecording();
            }
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            Log.w(LOG_TAG, "Start Button Pushed");
            recordButton.setText("Stop");
        } else {
            stopRecording();
            Log.w(LOG_TAG, "Stop Button Pushed");
            recordButton.setText("Start");
        }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {
        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    //Log.v(LOG_TAG, "audioRecord bufferReadResult: " + bufferReadResult);
                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            Buffer[] buffer = {ShortBuffer.wrap(audioData, 0, bufferReadResult)};
                            recorder.record(buffer);
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
        private boolean previewRunning = false;
        private SurfaceHolder holder;
        private Camera camera;
        private byte[] previewBuffer;
        long videoTimestamp = 0;
        Bitmap bitmap;
        Canvas canvas;

        public CameraView(Context _context) {
            super(_context);
            holder = this.getHolder();
            holder.addCallback(this);
            holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            camera = Camera.open();
            try {
                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);
                Camera.Parameters currentParams = camera.getParameters();
                Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
                Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                        + " imageHeight: " + currentParams.getPreviewSize().height);
                // Use these values
                imageWidth = currentParams.getPreviewSize().width;
                imageHeight = currentParams.getPreviewSize().height;
                frameRate = currentParams.getPreviewFrameRate();
                bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);
                /*
                Log.v(LOG_TAG, "Creating previewBuffer size: " + imageWidth * imageHeight
                        * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8);
                previewBuffer = new byte[imageWidth * imageHeight
                        * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8];
                camera.addCallbackBuffer(previewBuffer);
                camera.setPreviewCallbackWithBuffer(this);
                */
                camera.startPreview();
                previewRunning = true;
            } catch (IOException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v(LOG_TAG, "Surface Changed: width " + width + " height: " + height);
            // We would do this if we want to reset the camera parameters
            /*
            if (!recording) {
                if (previewRunning) {
                    camera.stopPreview();
                }
                try {
                    //Camera.Parameters cameraParameters = camera.getParameters();
                    //p.setPreviewSize(imageWidth, imageHeight);
                    //p.setPreviewFrameRate(frameRate);
                    //camera.setParameters(cameraParameters);
                    camera.setPreviewDisplay(holder);
                    camera.startPreview();
                    previewRunning = true;
                } catch (IOException e) {
                    Log.e(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
            */
            // Get the current parameters
            Camera.Parameters currentParams = camera.getParameters();
            Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
            Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                    + " imageHeight: " + currentParams.getPreviewSize().height);
            // Use these values
            imageWidth = currentParams.getPreviewSize().width;
            imageHeight = currentParams.getPreviewSize().height;
            frameRate = currentParams.getPreviewFrameRate();
            // Create the yuvIplimage if needed
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            try {
                camera.setPreviewCallback(null);
                previewRunning = false;
                camera.release();
            } catch (RuntimeException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (yuvIplimage != null && recording) {
                videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
                // Put the camera preview frame right into the yuvIplimage object
                yuvIplimage.getByteBuffer().put(data);

                // FAQ about IplImage:
                // - For custom raw processing of data, getByteBuffer() returns an NIO direct
                //   buffer wrapped around the memory pointed by imageData, and under Android we can
                //   also use that Buffer with Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
                // - To get a BufferedImage from an IplImage, we may call getBufferedImage().
                // - The createFrom() factory method can construct an IplImage from a BufferedImage.
                // - There are also a few copy*() methods for BufferedImage<->IplImage data transfers.

                // Let's try it..
                // This works but only on transparency
                // Need to find the right Bitmap and IplImage matching types
                /*
                bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
                //bitmap.setPixel(10,10,Color.MAGENTA);
                canvas = new Canvas(bitmap);
                Paint paint = new Paint();
                paint.setColor(Color.GREEN);
                float leftx = 20;
                float topy = 20;
                float rightx = 50;
                float bottomy = 100;
                RectF rectangle = new RectF(leftx, topy, rightx, bottomy);
                canvas.drawRect(rectangle, paint);
                bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
                */

                //Log.v(LOG_TAG, "Writing Frame");
                try {
                    // Get the correct time
                    recorder.setTimestamp(videoTimestamp);
                    // Record the image into FFmpegFrameRecorder
                    recorder.record(yuvIplimage);
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }
}
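On the crash itself (my interpretation, not from the question): RTMP only transports FLV-packaged streams, so a recorder pointed at an rtmp:// URL has to use the "flv" format; "mp4" additionally needs a seekable output to finalize its index (the moov atom), which a network stream is not. A hedged sketch of the two combinations that should work (the URL and file path are placeholders):

// Streaming to an RTMP server: the container must be FLV.
FFmpegFrameRecorder rtmpRecorder =
        new FFmpegFrameRecorder("rtmp://xxx.xxx.xxx.xxx:1935/live/test", imageWidth, imageHeight, 1);
rtmpRecorder.setFormat("flv");

// Recording to a local file: "mp4" works here because the output is seekable.
FFmpegFrameRecorder fileRecorder =
        new FFmpegFrameRecorder("/mnt/sdcard/recording.mp4", imageWidth, imageHeight, 1);
fileRecorder.setFormat("mp4");

If both a live stream and an mp4 file are needed, a common arrangement is to stream FLV to the server and let the server side (or a second, file-backed recorder) produce the mp4.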
-
[FFmpeg] How to write code for converting jpg files to avi (Motion JPEG)
13 April 2016, by YJJ
I want to write this code so that I can embed it in a machine with cameras.
I have a version I have been working on.
It generates an output avi file, but the file doesn't work.
I think I never wrote the logic to read the input images, and I don't know how to do it.
I want to input 100 jpg images from street_01.jpg to street_99.jpg
// Headers implied by the original post (not shown in it)
#include <stdio.h>
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"

int main(int argc, char* argv[]) {
    AVFormatContext* pFormatCtx;
    AVOutputFormat* fmt;
    AVStream* video_st;
    AVCodecContext* pCodecCtx;
    AVCodec* pCodec;
    AVPacket pkt;
    uint8_t* picture_buf;
    AVFrame* pFrame;
    int picture_size;
    int y_size;
    int framecnt = 0;

    //FILE *in_file = fopen("src01_480x272.yuv", "rb"); //Input raw YUV data
    FILE *in_file = fopen("street_01.jpg", "rb");       //Input raw YUV data
    int in_w = 2456, in_h = 2058;                       //Input data's width and height
    int framenum = 100;                                 //Frames to encode
    //const char* out_file = "src01.h264";              //Output Filepath
    //const char* out_file = "src01.ts";
    //const char* out_file = "src01.hevc";
    const char* out_file = "output.avi";

    av_register_all();

    /*
    //Method 1.
    pFormatCtx = avformat_alloc_context();
    //Guess Format
    fmt = av_guess_format(NULL, out_file, NULL);
    pFormatCtx->oformat = fmt;
    */

    //Method 2.
    avformat_alloc_output_context2(&pFormatCtx, NULL, "avi", out_file);
    fmt = pFormatCtx->oformat;

    //Open output URL
    if (avio_open(&pFormatCtx->pb, out_file, AVIO_FLAG_READ_WRITE) < 0) {
        printf("Failed to open output file! \n");
        return -1;
    }

    video_st = avformat_new_stream(pFormatCtx, 0);
    video_st->time_base.num = 1;
    video_st->time_base.den = 25;
    if (video_st == NULL) {
        return -1;
    }

    //Param that must set
    pCodecCtx = video_st->codec;
    //pCodecCtx->codec_id = AV_CODEC_ID_HEVC;
    pCodecCtx->codec_id = AV_CODEC_ID_MJPEG;
    //pCodecCtx->codec_id = fmt->video_codec;
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV444P;
    pCodecCtx->width = in_w;
    pCodecCtx->height = in_h;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 25;
    pCodecCtx->bit_rate = 400000;
    pCodecCtx->gop_size = 250;
    //H264
    //pCodecCtx->me_range = 16;
    //pCodecCtx->max_qdiff = 4;
    //pCodecCtx->qcompress = 0.6;
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;
    //Optional Param
    pCodecCtx->max_b_frames = 3;

    // Set Option
    AVDictionary *param = 0;
    //H264
    if (pCodecCtx->codec_id == AV_CODEC_ID_H264) {
        av_dict_set(&param, "preset", "slow", 0);
        av_dict_set(&param, "tune", "zerolatency", 0);
        //av_dict_set(&param, "profile", "main", 0);
    }
    //H265
    if (pCodecCtx->codec_id == AV_CODEC_ID_H265) {
        av_dict_set(&param, "preset", "ultrafast", 0);
        av_dict_set(&param, "tune", "zero-latency", 0);
    }

    //Show some Information
    av_dump_format(pFormatCtx, 0, out_file, 1);

    pCodec = avcodec_find_encoder(pCodecCtx->codec_id);
    if (!pCodec) {
        printf("Can not find encoder! \n");
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, &param) < 0) {
        printf("Failed to open encoder! \n");
        return -1;
    }

    pFrame = av_frame_alloc();
    //picture_size = av_image_get_buffer_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, 1);
    picture_size = avpicture_get_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
    picture_buf = (uint8_t *)av_malloc(picture_size);
    avpicture_fill((AVPicture *)pFrame, picture_buf, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);

    //Write File Header
    avformat_write_header(pFormatCtx, NULL);

    av_new_packet(&pkt, picture_size);

    y_size = pCodecCtx->width * pCodecCtx->height;

    for (int i = 0; i < framenum; i++) {
        //Read raw YUV data
        if (fread(picture_buf, 1, y_size * 3 / 2, in_file) <= 0) {
            printf("Failed to read raw data! \n");
            return -1;
        } else if (feof(in_file)) {
            break;
        }
        pFrame->data[0] = picture_buf;                  // Y
        pFrame->data[1] = picture_buf + y_size;         // U
        pFrame->data[2] = picture_buf + y_size * 5 / 4; // V
        //PTS
        pFrame->pts = i;
        int got_picture = 0;
        //Encode
        int ret = avcodec_encode_video2(pCodecCtx, &pkt, pFrame, &got_picture);
        if (ret < 0) {
            printf("Failed to encode! \n");
            return -1;
        }
        if (got_picture == 1) {
            printf("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, pkt.size);
            framecnt++;
            pkt.stream_index = video_st->index;
            ret = av_write_frame(pFormatCtx, &pkt);
            av_free_packet(&pkt);
        }
    }

    //Flush Encoder
    int ret = flush_encoder(pFormatCtx, 0);
    if (ret < 0) {
        printf("Flushing encoder failed\n");
        return -1;
    }

    //Write file trailer
    av_write_trailer(pFormatCtx);

    //Clean
    if (video_st) {
        avcodec_close(video_st->codec);
        av_free(pFrame);
        av_free(picture_buf);
    }
    avio_close(pFormatCtx->pb);
    avformat_free_context(pFormatCtx);
    fclose(in_file);

    return 0;
}
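The likely core problem (my reading of the code): fread() copies compressed JPEG bytes into picture_buf, and the encoder then treats those bytes as raw planar YUV, so the frames are garbage, and only street_01.jpg is ever opened. Each JPEG has to be decoded first. Below is a hedged sketch of the missing read-and-decode loop, kept in the same pre-3.0 API generation as the question (avcodec_decode_video2 and friends); the per-frame filename scheme and the assumption that the decoded pixel format matches the encoder's are mine:

char path[64];
for (int i = 1; i <= framenum; i++) {
    snprintf(path, sizeof(path), "street_%02d.jpg", i);

    // Open the JPEG as a one-frame "video" and decode it
    AVFormatContext* in_ctx = NULL;
    if (avformat_open_input(&in_ctx, path, NULL, NULL) < 0)
        break;
    avformat_find_stream_info(in_ctx, NULL);
    AVCodecContext* dec_ctx = in_ctx->streams[0]->codec;
    avcodec_open2(dec_ctx, avcodec_find_decoder(dec_ctx->codec_id), NULL);

    AVPacket in_pkt;
    AVFrame* jpeg_frame = av_frame_alloc();
    int got_frame = 0;
    while (!got_frame && av_read_frame(in_ctx, &in_pkt) >= 0) {
        avcodec_decode_video2(dec_ctx, jpeg_frame, &got_frame, &in_pkt);
        av_free_packet(&in_pkt);
    }

    if (got_frame) {
        // jpeg_frame now holds decoded pixels (JPEGs typically decode to YUVJ420P or
        // YUVJ444P); if that differs from the encoder's pix_fmt, convert with
        // sws_scale() before this point.
        jpeg_frame->pts = i - 1;
        int got_picture = 0;
        if (avcodec_encode_video2(pCodecCtx, &pkt, jpeg_frame, &got_picture) == 0 && got_picture) {
            pkt.stream_index = video_st->index;
            av_write_frame(pFormatCtx, &pkt);
            av_free_packet(&pkt);
        }
    }

    av_frame_free(&jpeg_frame);
    avcodec_close(dec_ctx);
    avformat_close_input(&in_ctx);
}

For comparison, the whole task can also be done from the command line with something like: ffmpeg -framerate 25 -start_number 1 -i street_%02d.jpg -c:v mjpeg output.avi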