
Media (1)
-
ED-ME-5 1-DVD
11 October 2011
Updated: October 2011
Language: English
Type: Audio
Other articles (57)
-
Publishing on MediaSPIP
13 June 2013
Can I post content from an iPad tablet?
Yes, if your MediaSPIP installation is at version 0.2 or higher. If in doubt, contact your MediaSPIP administrator to find out.
-
HTML5 audio and video support
13 April 2011
MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
The MediaSPIP player was created specifically for MediaSPIP and can easily be adapted to fit a specific theme.
For older browsers, the Flowplayer Flash fallback is used.
MediaSPIP allows for media playback on major mobile platforms with the above (...)
-
From upload to the final video [standalone version]
31 January 2010
The path of an audio or video document through SPIPMotion is divided into three distinct stages.
Upload and retrieval of information about the source video
First of all, you need to create a SPIP article and attach the "source" video document to it.
When this document is attached to the article, two actions are executed in addition to the normal behaviour: retrieval of the technical information about the file's audio and video streams, and generation of a thumbnail: extraction of a (...)
On other sites (8873)
-
Converting a voice recording into an mp3
21 July 2023, by Raphael M

For a vue.js messaging project, I'm using the wavesurfer.js library to record voice messages. However, Google Chrome gives me an audio/webm blob and Safari gives me an audio/mp4 blob.


I'm trying to find a solution to transcode the blob into audio/mp3. I've tried several methods, including ffmpeg. However, ffmpeg gives me an error when compiling with "npm run dev": "Can't resolve '/node_modules/@ffmpeg/core/dist/ffmpeg-core.js'".


"@ffmpeg/core": "^0.11.0",
"@ffmpeg/ffmpeg": "^0.11.6"



I tried downgrading ffmpeg:


"@ffmpeg/core": "^0.9.0",
"@ffmpeg/ffmpeg": "^0.9.8"



I no longer get the error message when compiling, but when I try to convert my audio stream, the console reports a problem with SharedArrayBuffer: "Uncaught (in promise) ReferenceError: SharedArrayBuffer is not defined".


Here's my complete code below.
Is there a reliable way of transcoding the audio stream into MP3?


Can you give me an example?


Thanks


<template>
 <div class="left-panel">
 <header class="radial-blue">
 <div class="container">
 <h1 class="mb-30">Posez votre première question à nos thérapeutes</h1>
 <p><b>Attention</b>, vous disposez seulement de 2 messages. Veillez à les utiliser de manière judicieuse !</p>
 <div class="available-messages">
 <div class="item disabled">
 <span>Message 1</span>
 </div>
 <div class="item">
 <span>Message 2</span>
 </div>
 </div>
 </div>
 </header>
 </div>
 <div class="right-panel">
 <div class="messagerie bg-light">
 <messaging ref="messagingComponent"></messaging>
 <footer>
 <button type="button"><img src="http://stackoverflow.com/assets/backoffice/images/record-start.svg" style='max-width: 300px; max-height: 300px' /></button>
 <div class="loading-animation">
 <img src="http://stackoverflow.com/assets/backoffice/images/record-loading.svg" style='max-width: 300px; max-height: 300px' />
 </div>
 <button type="button"><img src="http://stackoverflow.com/assets/backoffice/images/record-stop.svg" style='max-width: 300px; max-height: 300px' /></button>
 <div class="textarea gradient text-dark">
 <textarea placeholder="Posez votre question"></textarea>
 </div>
 <div class="loading-text">Chargement de votre microphone en cours...</div>
 <div class="loading-text">Envoi de votre message en cours...</div>
 <div ref="visualizer"></div>
 <button type="button"><img src="http://stackoverflow.com/assets/backoffice/images/send.svg" style='max-width: 300px; max-height: 300px' /></button>
 <div>
 {{ formatTimer() }}
 </div>
 </footer>
 </div>
 </div>
</template>

<script>
import Messaging from "./Messaging.vue";
import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

export default {
  data() {
    return {
      isMicrophoneLoading: false,
      isSubmitLoading: false,
      isMobile: false,
      isMessagerie: false,
      isRecording: false,
      audioUrl: '',
      messageText: '',
      message: null,
      wavesurfer: null,
      access: (this.isMobile ? 'denied' : 'granted'),
      maxMinutes: 5,
      orangeTimer: 3,
      redTimer: 4,
      timer: 0,
      timerInterval: null,
      ffmpeg: null,
    };
  },
  components: {
    Messaging,
  },
  mounted() {
    this.checkScreenSize();
    window.addEventListener('resize', this.checkScreenSize);

    if (!this.isMobile) {
      this.$moment.locale('fr');
      window.addEventListener('beforeunload', (event) => {
        if (this.isMessagerie) {
          event.preventDefault();
          event.returnValue = '';
        }
      });

      this.initializeWaveSurfer();
    }
  },
  beforeUnmount() {
    window.removeEventListener('resize', this.checkScreenSize);
  },
  methods: {
    checkScreenSize() {
      this.isMobile = window.innerWidth < 1200;

      const windowHeight = window.innerHeight;
      const navbarHeight = this.$navbarHeight;
      let padding = parseInt(navbarHeight + 181);

      const messageListHeight = windowHeight - padding;
      this.$refs.messagingComponent.$refs.messageList.style.height = messageListHeight + 'px';
    },
    showMessagerie() {
      this.isMessagerie = true;
      this.$refs.messagingComponent.scrollToBottom();
    },
    // Ask for microphone access, then immediately release the tracks.
    checkMicrophoneAccess() {
      if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        return navigator.mediaDevices.getUserMedia({ audio: true })
          .then(function (stream) {
            stream.getTracks().forEach(function (track) {
              track.stop();
            });
            return true;
          })
          .catch(function (error) {
            console.error('Erreur lors de la demande d\'accès au microphone:', error);
            return false;
          });
      } else {
        console.error('getUserMedia n\'est pas supporté par votre navigateur.');
        return false;
      }
    },
    initializeWaveSurfer() {
      this.wavesurfer = this.$wavesurfer.create({
        container: '#visualizer',
        barWidth: 3,
        barHeight: 1.5,
        height: 46,
        responsive: true,
        waveColor: 'rgba(108,115,202,0.3)',
        progressColor: 'rgba(108,115,202,1)',
        cursorColor: 'transparent'
      });

      this.record = this.wavesurfer.registerPlugin(this.$recordPlugin.create());
    },
    startRecording() {
      const _this = this;
      this.isMicrophoneLoading = true;

      setTimeout(() => {
        _this.checkMicrophoneAccess().then(function (accessible) {
          if (accessible) {
            _this.record.startRecording();

            _this.record.once('startRecording', () => {
              _this.isMicrophoneLoading = false;
              _this.isRecording = true;
              _this.updateChildMessage('server', 'Allez-y ! Vous pouvez enregistrer votre message audio maintenant. La durée maximale autorisée pour votre enregistrement est de 5 minutes.', 'text', '', 'Message automatique');
              _this.startTimer();
            });
          } else {
            _this.isRecording = false;
            _this.isMicrophoneLoading = false;
            _this.$swal.fire({
              title: 'Microphone non détecté',
              html: '<p>Le microphone de votre appareil est inaccessible ou l\'accès a été refusé.</p><p>Merci de vérifier les paramètres de votre navigateur afin de vérifier les autorisations de votre microphone.</p>',
              footer: '<a href="http://stackoverflow.com/contact">Vous avez besoin d\'aide ?</a>',
            });
          }
        });
      }, 100);
    },
    stopRecording() {
      this.stopTimer();
      this.isRecording = false;
      this.isSubmitLoading = true;
      this.record.stopRecording();

      this.record.once('stopRecording', () => {
        const blobUrl = this.record.getRecordedUrl();
        fetch(blobUrl).then(response => response.blob()).then(blob => {
          this.uploadAudio(blob);
        });
      });
    },
    startTimer() {
      this.timerInterval = setInterval(() => {
        this.timer++;
        if (this.timer === this.maxMinutes * 60) {
          this.stopRecording();
        }
      }, 1000);
    },
    stopTimer() {
      clearInterval(this.timerInterval);
      this.timer = 0;
    },
    formatTimer() {
      const minutes = Math.floor(this.timer / 60);
      const seconds = this.timer % 60;
      const formattedMinutes = minutes < 10 ? `0${minutes}` : minutes;
      const formattedSeconds = seconds < 10 ? `0${seconds}` : seconds;
      return `${formattedMinutes}:${formattedSeconds}`;
    },
    async uploadAudio(blob) {
      const format = blob.type === 'audio/webm' ? 'webm' : 'mp4';

      // Convert the blob to MP3
      const mp3Blob = await this.convertToMp3(blob, format);

      const s3 = new this.$AWS.S3({
        accessKeyId: 'xxx',
        secretAccessKey: 'xxx',
        region: 'eu-west-1'
      });

      var currentDate = new Date();
      var filename = currentDate.getDate().toString() + '-' + currentDate.getMonth().toString() + '-' + currentDate.getFullYear().toString() + '--' + currentDate.getHours().toString() + '-' + currentDate.getMinutes().toString() + '.mp4';

      const params = {
        Bucket: 'xxx/audio',
        Key: filename,
        Body: mp3Blob,
        ACL: 'public-read',
        ContentType: 'audio/mp3'
      }

      s3.upload(params, (err, data) => {
        if (err) {
          console.error('Error uploading audio:', err)
        } else {
          const currentDate = this.$moment();
          const timestamp = currentDate.format('dddd DD MMMM YYYY HH:mm');

          this.updateChildMessage('client', '', 'audio', mp3Blob, timestamp);
          this.isSubmitLoading = false;
        }
      });
    },
    // Transcode the recorded blob to MP3 in the browser with ffmpeg.wasm.
    // This is the step that throws "SharedArrayBuffer is not defined" when
    // the page is not cross-origin isolated (see the note below the code).
    async convertToMp3(blob, format) {
      const ffmpeg = createFFmpeg({ log: true });
      await ffmpeg.load();

      const inputPath = 'input.' + format;
      const outputPath = 'output.mp3';

      ffmpeg.FS('writeFile', inputPath, await fetchFile(blob));

      await ffmpeg.run('-i', inputPath, '-acodec', 'libmp3lame', outputPath);

      const mp3Data = ffmpeg.FS('readFile', outputPath);
      const mp3Blob = new Blob([mp3Data.buffer], { type: 'audio/mp3' });

      ffmpeg.FS('unlink', inputPath);
      ffmpeg.FS('unlink', outputPath);

      return mp3Blob;
    },
    sendMessage() {
      this.isSubmitLoading = true;
      if (this.messageText.trim() !== '') {
        const emmet = 'client';
        const text = this.escapeHTML(this.messageText)
          .replace(/\n/g, '<br>');

        const currentDate = this.$moment();
        const timestamp = currentDate.format('dddd DD MMMM YYYY HH:mm');

        this.$nextTick(() => {
          this.messageText = '';

          const textarea = document.getElementById('messageTextarea');
          if (textarea) {
            textarea.scrollTop = 0;
            textarea.scrollLeft = 0;
          }
        });

        this.updateChildMessage(emmet, text, 'text', '', timestamp);
        this.isSubmitLoading = false;
      }
    },
    escapeHTML(text) {
      const map = {
        '&': '&amp;',
        '<': '&lt;',
        '>': '&gt;',
        '"': '&quot;',
        "'": '&#039;',
        "`": '&#x60;',
        "/": '&#x2F;'
      };
      return text.replace(/[&<>"'`/]/g, (match) => map[match]);
    },
    updateChildMessage(emmet, text, type, blob, timestamp) {
      const newMessage = {
        id: this.$refs.messagingComponent.lastMessageId + 1,
        emmet: emmet,
        text: text,
        type: type,
        blob: blob,
        timestamp: timestamp
      };

      this.$refs.messagingComponent.updateMessages(newMessage);
    }
  },
};
</script>
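
A note on the SharedArrayBuffer error, since it is environmental rather than a bug in the component: the multithreaded wasm core behind @ffmpeg/ffmpeg 0.11.x needs SharedArrayBuffer, and browsers only define SharedArrayBuffer on cross-origin isolated pages, i.e. pages served with the headers Cross-Origin-Opener-Policy: same-origin and Cross-Origin-Embedder-Policy: require-corp. With the Vite dev server these can be set through the server.headers option. Purely to illustrate the two headers, here is a minimal sketch of a static file server that sends them, using only the JDK's built-in HttpServer; the port, the dist/ folder and the class name are assumptions made for the example.

import com.sun.net.httpserver.HttpServer;

import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.nio.file.Path;

public class CrossOriginIsolatedServer {
    public static void main(String[] args) throws Exception {
        HttpServer server = HttpServer.create(new InetSocketAddress(8080), 0);
        server.createContext("/", exchange -> {
            // These two headers make the page "cross-origin isolated",
            // which is what unlocks SharedArrayBuffer in the browser.
            exchange.getResponseHeaders().add("Cross-Origin-Opener-Policy", "same-origin");
            exchange.getResponseHeaders().add("Cross-Origin-Embedder-Policy", "require-corp");

            // Serve the built app from a hypothetical "dist" folder,
            // falling back to index.html for SPA routes.
            Path file = Path.of("dist", exchange.getRequestURI().getPath().substring(1));
            if (!Files.isRegularFile(file)) {
                file = Path.of("dist", "index.html");
            }
            byte[] body = Files.readAllBytes(file);
            exchange.sendResponseHeaders(200, body.length);
            try (OutputStream os = exchange.getResponseBody()) {
                os.write(body);
            }
        });
        server.start();
        System.out.println("Serving dist/ on http://localhost:8080 with COOP/COEP headers");
    }
}

Once window.crossOriginIsolated is true, the ReferenceError should disappear. Failing that, there is a single-threaded core package (@ffmpeg/core-st) which, as far as I know, avoids SharedArrayBuffer entirely at the cost of conversion speed.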



-
Is it feasible to create FFmpegFrameGrabbers one by one for a single FFmpegFrameRecorder and keep the video stream alive?
12 July 2023, by zhoutian

The reason I ask this question is that I receive byte[] chunks of container data (the format is dhav) one by one, and I need to push that data to RTMP continuously for playback.


What progress have I made so far?


For now, I can push data to RTMP and play it in VLC for just a few seconds; then the RTMP stream ends.


That is because a grabber created from an InputStream only contains the small slice of data taken from the ByteBuffer; when that InputStream reaches its end, the RTMP connection is closed.


synchronized (buffer) {
 buffer.flip();
 byte[] bytes = new byte[buffer.remaining()];
 buffer.get(bytes);
 buffer.clear();
 isByteBufferFull[0] = false;
 try {
 grabAndPush(bytes, SRS_PUSH_ADDRESS);
 } catch (Exception e) {
 //throw new RuntimeException(e);
 }

 }



private static synchronized void grabAndPush(byte[] bytes, String pushAddress) throws Exception {
 avutil.av_log_set_level(avutil.AV_LOG_INFO);
 FFmpegLogCallback.set();

 FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(new ByteArrayInputStream(bytes));
...
}



So can anyone tell me how to keep the RTMP stream alive with FFmpegFrameGrabber and FFmpegFrameRecorder when the source data arrives chunk by chunk? (A possible approach is sketched after the code below.)
Much appreciated 😃


This is my code:


import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avformat.AVStream;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.FFmpegLogCallback;
import org.bytedeco.javacv.Frame;
import org.jfjy.ch2ji.ecctv.dh.api.ApiService;
import org.jfjy.ch2ji.ecctv.dh.callback.RealPlayCallback;

import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Slf4j
public class GetBytes2PushRTMPNew2 {

 private static final String SRS_PUSH_ADDRESS = "rtmp://127.0.0.1:1935/live/livestream";

 static int BUFFER_CAPACITY = 1 * 1024 * 1024;

 public static void main(String[] args) throws Exception {
 FFmpegLogCallback.set();
 ApiService apiService = new ApiService();
 Long login = apiService.login("10.3.0.54", 8801, "admin", "xxxx");
 ByteBuffer buffer = ByteBuffer.allocate(BUFFER_CAPACITY);
 final boolean[] isByteBufferFull = {false};
 apiService.startRealPlay(new RealPlayCallback() {
 @Override
 public void apply(Long aLong, Integer integer, byte[] bytes) {
 try {
 //push data to bytebuffer
 synchronized (buffer) {
 if (buffer.remaining() > bytes.length) {
 buffer.put(bytes);
 } else {
 isByteBufferFull[0] = true;
 }
 }
 } catch (Exception e) {
 throw new RuntimeException(e);
 }
 }
 }, 0, 0);

 ExecutorService executorService = Executors.newFixedThreadPool(1);
 executorService.execute(new Runnable() {
 @Override
 public void run() {
 while (true) {
 //get data from bytebuffer when buffer is full
 synchronized (isByteBufferFull) {
 if (isByteBufferFull[0]) {
 synchronized (buffer) {
 buffer.flip();
 byte[] bytes = new byte[buffer.remaining()];
 buffer.get(bytes);
 buffer.clear();
 isByteBufferFull[0] = false;
 try {
 //using grabber and recorder to push RTMP
 grabAndPush(bytes, SRS_PUSH_ADDRESS);
 } catch (Exception e) {
 //throw new RuntimeException(e);
 }

 }
 }
 }
 try {
 Thread.sleep(500);
 } catch (InterruptedException e) {
 throw new RuntimeException(e);
 }
 }

 }
 });
 while (true) {
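 // NB: this empty busy-wait only keeps main() alive and burns a core;
 // a CountDownLatch or Thread.join() would do the same more cheaply.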

 }
 }

 private static synchronized void grabAndPush(byte[] bytes, String pushAddress) throws Exception {
 avutil.av_log_set_level(avutil.AV_LOG_INFO);
 FFmpegLogCallback.set();

 FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(new ByteArrayInputStream(bytes));
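 // NOTE: a fresh grabber over a finite ByteArrayInputStream makes FFmpeg treat each
 // chunk as a complete input; at end-of-stream the grab loop stops and the RTMP push
 // ends, which matches the "plays for a few seconds" symptom described above.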


 grabber.setFormat("dhav");
 grabber.start();

 AVFormatContext avFormatContext = grabber.getFormatContext();

 int streamNum = avFormatContext.nb_streams();

 if (streamNum < 1) {
 log.error("no media!");
 return;
 }

 int frameRate = (int) grabber.getVideoFrameRate();
 if (0 == frameRate) {
 frameRate = 15;
 }
 log.info("frameRate[{}],duration[{}]秒,nb_streams[{}]",
 frameRate,
 avFormatContext.duration() / 1000000,
 avFormatContext.nb_streams());

 for (int i = 0; i < streamNum; i++) {
 AVStream avStream = avFormatContext.streams(i);
 AVCodecParameters avCodecParameters = avStream.codecpar();
 log.info("stream index[{}],codec type[{}],codec ID[{}]", i, avCodecParameters.codec_type(), avCodecParameters.codec_id());
 }

 int frameWidth = grabber.getImageWidth();
 int frameHeight = grabber.getImageHeight();
 int audioChannels = grabber.getAudioChannels();

 log.info("frameWidth[{}],frameHeight[{}],audioChannels[{}]",
 frameWidth,
 frameHeight,
 audioChannels);

 FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(pushAddress,
 frameWidth,
 frameHeight,
 audioChannels);

 recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
 recorder.setInterleaved(true);

 recorder.setFormat("flv");

 recorder.setFrameRate(frameRate);

 recorder.setGopSize(frameRate);

 recorder.setAudioChannels(grabber.getAudioChannels());


 recorder.start();


 Frame frame;


 log.info("start push");

 int videoFrameNum = 0;
 int audioFrameNum = 0;
 int dataFrameNum = 0;

 int interVal = 1000 / frameRate;
 interVal /= 8;
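 // NB: pacing with sleep() only approximates real time (here deliberately 8x faster);
 // players follow the packet timestamps, not the wall-clock rate of record() calls.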

 while (null != (frame = grabber.grab())) {

 if (null != frame.image) {
 videoFrameNum++;
 }

 if (null != frame.samples) {
 audioFrameNum++;
 }

 if (null != frame.data) {
 dataFrameNum++;
 }

 recorder.record(frame);

 Thread.sleep(interVal);
 }

 log.info("push complete,videoFrameNum[{}],audioFrameNum[{}],dataFrameNum[{}]",
 videoFrameNum,
 audioFrameNum,
 dataFrameNum);

 recorder.close();
 grabber.close();
 }


}
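
One way to keep the stream alive, sketched below, is to create the grabber and the recorder once and feed them from a blocking pipe, instead of building a fresh FFmpegFrameGrabber over a finite ByteArrayInputStream for every chunk. grab() then waits for more bytes rather than hitting end-of-input between chunks, so the recorder and the RTMP connection stay open. This is a minimal, untested sketch: the pipe size is a guess, the audio and codec settings from the original code are trimmed, and it assumes the dhav demuxer can probe a non-seekable stream once enough initial bytes have arrived.

import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;

import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class ContinuousPush {

    private static final String SRS_PUSH_ADDRESS = "rtmp://127.0.0.1:1935/live/livestream";

    public static void main(String[] args) throws Exception {
        // Write end: the RealPlayCallback writes every byte[] chunk here.
        PipedOutputStream feed = new PipedOutputStream();
        // Read end: the grabber blocks on this stream until data arrives,
        // so it never sees end-of-input while the camera keeps sending.
        PipedInputStream source = new PipedInputStream(feed, 4 * 1024 * 1024);

        Thread pusher = new Thread(() -> {
            try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(source)) {
                grabber.setFormat("dhav");
                grabber.start(); // blocks until enough bytes arrive to probe the stream

                double fps = grabber.getVideoFrameRate() > 0 ? grabber.getVideoFrameRate() : 15;
                FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(
                        SRS_PUSH_ADDRESS,
                        grabber.getImageWidth(),
                        grabber.getImageHeight(),
                        grabber.getAudioChannels());
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
                recorder.setFormat("flv");
                recorder.setFrameRate(fps);
                recorder.start();

                // Runs for as long as the pipe stays open; grab() returns null only
                // after feed.close(), so the RTMP session is not torn down between chunks.
                Frame frame;
                while ((frame = grabber.grab()) != null) {
                    recorder.record(frame);
                }
                recorder.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }, "rtmp-push");
        pusher.start();

        // Inside RealPlayCallback.apply(...) you would then simply do:
        //     feed.write(bytes);   // blocks only while the pipe is full
        // and call feed.close() when the device disconnects.
        pusher.join();
    }
}

The design point is that the lifetime of the grabber/recorder pair now matches the lifetime of the device session rather than the size of one buffer, which is what keeps the RTMP stream from ending after a few seconds.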



-
Memory leak in C++/CLI application
10 December 2013, by Ankush

I am passing a bitmap from my C# app to a C++/CLI DLL that adds it to a video.
The problem is that the program slowly leaks memory. I tried _CrtDumpMemoryLeaks(); it shows me a leak of the bitmap and another 40-byte leak, even though I am disposing of the bitmap.
Can anyone see the memory leak? Here is the code. Flow:
1) Capture a screenshot with takescreeshot()
2) Pass it to the C++/CLI function
3) Dispose of the bitmap

Lines from my C# app:
Bitmap snap = takescreeshot();
vencoder.AddBitmap(snap);
snap.Dispose();
vencoder.printleak();

private static Bitmap takescreeshot()
{
    System.Drawing.Bitmap bitmap = null;
    System.Drawing.Graphics graphics = null;
    bitmap = new Bitmap
    (
        System.Windows.Forms.Screen.PrimaryScreen.Bounds.Width,
        System.Windows.Forms.Screen.PrimaryScreen.Bounds.Height,
        System.Drawing.Imaging.PixelFormat.Format24bppRgb
    );
    graphics = System.Drawing.Graphics.FromImage(bitmap);
    graphics.CopyFromScreen(Screen.PrimaryScreen.Bounds.X, Screen.PrimaryScreen.Bounds.Y, 0, 0, Screen.PrimaryScreen.Bounds.Size);

    // Write timestamp
    Rectangle rect = new Rectangle(1166, 738, 200, 20);
    String datetime = System.String.Format("{0:dd:MM:yy hh:mm:ss}", DateTime.Now);
    System.Drawing.Font sysfont = new System.Drawing.Font("Times New Roman", 14, FontStyle.Bold);
    graphics.DrawString(datetime, sysfont, Brushes.Red, rect);
    // NB: sysfont is never disposed, which leaks a GDI handle on every capture.

    Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
    Bitmap grayImage = filter.Apply(bitmap);

    // Dispose
    bitmap.Dispose();
    graphics.Dispose();
    return grayImage;
}

Now in the C++/CLI DLL:
bool VideoEncoder::AddBitmap(Bitmap^ bitmap)
{
    BitmapData^ bitmapData = bitmap->LockBits(System::Drawing::Rectangle(0, 0, bitmap->Width, bitmap->Height),
                                              ImageLockMode::ReadOnly, PixelFormat::Format8bppIndexed);
    uint8_t* ptr = reinterpret_cast<uint8_t*>(static_cast<void*>(bitmapData->Scan0));
    uint8_t* srcData[4] = { ptr, NULL, NULL, NULL };
    int srcLinesize[4] = { bitmapData->Stride, 0, 0, 0 };

    // A new AVFrame plus pixel buffer is allocated on every call;
    // see the cleanup note in write_video_frame() below.
    pCurrentPicture = CreateFFmpegPicture(pVideoStream->codec->pix_fmt, pVideoStream->codec->width, pVideoStream->codec->height);

    sws_scale(pImgConvertCtx, srcData, srcLinesize, 0, bitmap->Height, pCurrentPicture->data, pCurrentPicture->linesize);
    bitmap->UnlockBits(bitmapData);
    write_video_frame();
    bitmapData = nullptr;
    ptr = NULL;
    return true;
}

AVFrame* VideoEncoder::CreateFFmpegPicture(int pix_fmt, int nWidth, int nHeight)
{
    AVFrame* picture = NULL;
    uint8_t* picture_buf = NULL;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
    {
        printf("Cannot create frame\n");
        return NULL;
    }

    size = avpicture_get_size((AVPixelFormat)pix_fmt, nWidth, nHeight);
    picture_buf = (uint8_t*)av_malloc(size);
    if (!picture_buf)
    {
        av_free(picture);
        printf("Cannot allocate buffer\n");
        return NULL;
    }

    avpicture_fill((AVPicture*)picture, picture_buf, (AVPixelFormat)pix_fmt, nWidth, nHeight);
    return picture;
}

void VideoEncoder::write_video_frame()
{
    AVCodecContext* codecContext = pVideoStream->codec;
    int out_size, ret = 0;

    if (pFormatContext->oformat->flags & AVFMT_RAWPICTURE)
    {
        printf("raw picture must be written");
    }
    else
    {
        out_size = avcodec_encode_video(codecContext, pVideoEncodeBuffer, nSizeVideoEncodeBuffer, pCurrentPicture);
        if (out_size > 0)
        {
            AVPacket packet;
            av_init_packet(&packet);
            if (codecContext->coded_frame->pts != AV_NOPTS_VALUE)
            {
                // NB: this rescales packet.pts from its own just-initialized value;
                // codecContext->coded_frame->pts was probably intended here.
                packet.pts = av_rescale_q(packet.pts, codecContext->time_base, pVideoStream->time_base);
            }
            if (codecContext->coded_frame->pkt_dts != AV_NOPTS_VALUE)
            {
                packet.dts = av_rescale_q(packet.dts, codecContext->time_base, pVideoStream->time_base);
            }
            if (codecContext->coded_frame->key_frame)
            {
                packet.flags |= AV_PKT_FLAG_KEY;
            }
            packet.stream_index = pVideoStream->index;
            packet.data = pVideoEncodeBuffer;
            packet.size = out_size;

            ret = av_interleaved_write_frame(pFormatContext, &packet);
            av_free_packet(&packet);

            // Likely culprit: av_freep() expects the ADDRESS of a pointer
            // (av_freep(&p)). Passing pCurrentPicture itself frees
            // picture->data[0] (the av_malloc'd buffer) but never releases the
            // AVFrame struct from avcodec_alloc_frame() -- a small fixed-size
            // leak on every frame. When out_size == 0 (frame buffered), nothing
            // is freed at all. Freeing both the buffer and the frame on every
            // path should stop the growth.
            av_freep(pCurrentPicture);
        }
        else
        {
            // image was buffered
        }
    }

    if (ret != 0)
    {
        throw gcnew Exception("Error while writing video frame.");
    }
}

void VideoEncoder::printleak()
{
    printf("No of leaks: %d", _CrtDumpMemoryLeaks());
    printf("\n");
}