
Media (91)

Other articles (32)

  • Customize by adding your logo, banner or background image

    5 September 2013

    Some themes support three customization elements: adding a logo; adding a banner; adding a background image.

  • Write a news item

    21 June 2013

    Present the changes to your MédiaSPIP or the news of your projects on your MédiaSPIP using the news section.
    In MédiaSPIP's default theme, spipeo, news items are displayed at the bottom of the main page, below the editorials.
    You can customize the news item creation form.
    News item creation form: for a document of the news item type, the default fields are: publication date (customize the publication date) (...)

  • Publish on MédiaSpip

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, if your installed Médiaspip is at version 0.2 or higher. If needed, contact the administrator of your MédiaSpip to find out.

On other sites (3878)

  • Firebase function to convert YouTube to mp3

    9 October 2023, by satchel

    I want to deploy to Firebase Cloud Functions.

    


    However, I get a vague error: “Cannot analyze code” after it passes the initial pre-deploy checks successfully.

    


    But I cannot figure out the problem given the vagueness of the error.

    


    It looks right with these requirements:

    


      

    • Receive a POST with a JSON body containing the YouTube videoID as a string
    • Download locally using the YouTube download package
    • Pipe to the ffmpeg package and save the mp3 to the local temp directory
    • Store in the default bucket in Firebase Storage
    • Apply the make-public method to make it public

    const functions = require('firebase-functions');
const admin = require('firebase-admin');
const ytdl = require('ytdl-core');
const ffmpeg = require('fluent-ffmpeg');
const fs = require('fs');
const path = require('path');
const os = require('os');

admin.initializeApp();

// Set the path to the FFmpeg binary
const ffmpegPath = path.join(__dirname, 'bin', 'ffmpeg');
ffmpeg.setFfmpegPath(ffmpegPath);

exports.audioUrl = functions.https.onRequest(async (req, res) => {
    if (req.method !== 'POST') {
        res.status(405).send('Method Not Allowed');
        return;
    }

    const videoId = req.body.videoID;
    const videoUrl = `https://www.youtube.com/watch?v=${videoId}`;
    const audioPath = path.join(os.tmpdir(), `${videoId}.mp3`);

    try {
        await new Promise((resolve, reject) => {
            ytdl(videoUrl, { filter: format => format.container === 'mp4' })
                .pipe(ffmpeg())
                .audioCodec('libmp3lame')
                .save(audioPath)
                .on('end', resolve)
                .on('error', reject);
        });

        const bucket = admin.storage().bucket();
        const file = bucket.file(`${videoId}.mp3`);
        await bucket.upload(audioPath, {
            destination: file.name,
            metadata: {
                contentType: 'audio/mp3',
            },
        });

        // Make the file publicly accessible
        await file.makePublic();

        const publicUrl = file.publicUrl();
        res.status(200).send({ url: publicUrl });
    } catch (error) {
        console.error('Error processing video:', error);
        res.status(500).send('Internal Server Error');
    }
});
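
    For reference, a minimal sketch of how the deployed endpoint could be called, assuming a hypothetical Cloud Functions URL (the host below is a placeholder, not from the original post). It sends the JSON body the function reads as req.body.videoID and parses the { url } object the function responds with:

    // Hypothetical caller script (Node 18+, which provides a global fetch).
    // Replace the endpoint with the real URL printed by `firebase deploy`.
    const endpoint = 'https://us-central1-<your-project>.cloudfunctions.net/audioUrl';

    async function requestMp3(videoID) {
        const res = await fetch(endpoint, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ videoID }), // read by the function as req.body.videoID
        });
        if (!res.ok) throw new Error(`Request failed with status ${res.status}`);
        const { url } = await res.json();      // the function responds with { url: publicUrl }
        return url;
    }

    requestMp3('VIDEO_ID_HERE').then(console.log).catch(console.error);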


    


    The following is the package.json file, which lists the dependencies for the function as well as the entry point, which I believe just needs to be the name of the file containing the code:

    


    {
  "name": "firebase-functions",
  "description": "Firebase Cloud Functions",
  "main": "audioUrl.js", 
  "dependencies": {
    "firebase-admin": "^10.0.0",
    "firebase-functions": "^4.0.0",
    "ytdl-core": "^4.9.1",
    "fluent-ffmpeg": "^2.1.2"
  },
  "engines": {
    "node": "18"
  },
  "private": true
}


    


    (Edit) Here is the error:

    


     deploying functions
✔  functions: Finished running predeploy script.
i  functions: preparing codebase default for deployment
i  functions: ensuring required API cloudfunctions.googleapis.com is enabled...
i  functions: ensuring required API cloudbuild.googleapis.com is enabled...
i  artifactregistry: ensuring required API artifactregistry.googleapis.com is enabled...
✔  functions: required API cloudbuild.googleapis.com is enabled
✔  artifactregistry: required API artifactregistry.googleapis.com is enabled
✔  functions: required API cloudfunctions.googleapis.com is enabled
i  functions: Loading and analyzing source code for codebase default to determine what to deploy
Serving at port 8171

shutdown requested via /__/quitquitquit


Error: Functions codebase could not be analyzed successfully. It may have a syntax or runtime error


    


    Failed to load function definition from source: FirebaseError: Functions codebase could not be analyzed successfully. It may have a syntax or runtime error


    


    I get the same error when running the following:

    


    firebase deploy --only functions:audioUrl


    


    And I thought I might get more detailed errors using the emulator:

    


    firebase emulators:start


    


    Under the emulator I had this additional error initially:

    


    Your requested "node" version "18" doesn't match your global version "16". Using node@16 from host.
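
    As a quick sanity check for that mismatch, a small standalone script (hypothetical, not from the original post) can compare the Node version that is actually running against the "engines" field in package.json; as the message above indicates, both the deploy-time analysis and the emulator load the code with the locally installed Node:

    // Hypothetical check script; run it from the functions directory with `node check-node.js`.
    const { engines } = require('./package.json');
    const runningMajor = process.versions.node.split('.')[0]; // e.g. "16"

    console.log(`package.json engines.node: ${engines.node}`);
    console.log(`Node version in use:       ${runningMajor}`);

    if (engines.node !== runningMajor) {
        console.warn('Local Node does not match engines.node; the CLI analysis and the emulator run on the local version.');
    }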


    


  • YouTube - MP4 to MP3 messes up

    23 November 2019, by theeSpark

    It’s supposed to download all the videos in the playlist and convert them to mp3. But all it does is create the mp4s and one empty mp3 with a number higher than the highest mp4. My newbie brain doesn’t know how to fix this...

    var ytpl = require('ytpl');
    var fs = require('fs-extra');
    var path = require('path');
    var ffmpeg = require('fluent-ffmpeg');
    var binaries = require('ffmpeg-static');
    var ytdl = require('ytdl-core');

    var output_dir = path.join(__dirname+"/dl");

    ytpl("PL8n8S4mVUWvprlN2dCAMoIo6h47ZwR_gn", (err, pl) => {
       if(err) throw err;

       let c = 0;

       pl.items.forEach((i) => {
           ytdl(i.url_simple+"", { filter: 'audioonly' }).pipe(fs.createWriteStream(output_dir+"/"+c+".mp4")).on('finish', () => {
               console.log("Finished MP4 DL, starting conversion...");
               ffmpeg(output_dir+"/"+c+".mp4")
                   .setFfmpegPath(binaries.path)
                   .format('mp3')
                   .audioBitrate(320)
                   .output(fs.createWriteStream(output_dir+"/"+c+".mp3"))
                   .on('end', () => {
                       console.log("Finished MP3 Convert...");
                   })
                   .run();
           });
           c++;
       });

    });
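
    The symptom described above (correct mp4 files plus one empty mp3 numbered past the last mp4) is consistent with the shared counter c: forEach increments it synchronously for every item before any of the asynchronous 'finish' callbacks fire, so each callback sees the final value and looks for an mp4 that does not exist. A minimal sketch, using the same packages, of keeping a per-item index via forEach's second argument so each callback converts its own file:

    var ytpl = require('ytpl');
    var fs = require('fs-extra');
    var path = require('path');
    var ffmpeg = require('fluent-ffmpeg');
    var binaries = require('ffmpeg-static');
    var ytdl = require('ytdl-core');

    var output_dir = path.join(__dirname, 'dl');

    ytpl("PL8n8S4mVUWvprlN2dCAMoIo6h47ZwR_gn", (err, pl) => {
        if (err) throw err;

        // forEach passes the item index, so every callback closes over its own value
        pl.items.forEach((item, index) => {
            var mp4Path = path.join(output_dir, index + '.mp4');
            var mp3Path = path.join(output_dir, index + '.mp3');

            ytdl(item.url_simple, { filter: 'audioonly' })
                .pipe(fs.createWriteStream(mp4Path))
                .on('finish', () => {
                    console.log("Finished MP4 DL, starting conversion...");
                    // index still refers to this item, so the mp4 that was just written is converted
                    ffmpeg(mp4Path)
                        .setFfmpegPath(binaries.path)
                        .format('mp3')
                        .audioBitrate(320)
                        .output(mp3Path)
                        .on('end', () => console.log("Finished MP3 convert for item " + index))
                        .run();
                });
        });
    });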
  • Is it possible to establish a YouTube live stream with ffmpeg from Android if the video is encoded on another device

    20 May 2015, by Nick Kleban

    I have "Android Device" and some other "Device with camera". This device captures video, encodes it (h264) and sends it to my phone through UDP. On phone i’m receiving only AVFrame’s and it enough to decode and show this video on phone screen. But i can’t establish working video stream to youtube.

    The problem is that all the examples do the encoding and streaming on one device, so they have a properly initialized AVStream and AVPacket. I have the feeling that I’m missing something, but I can’t find what. I’ve gone through all the stages of creating the broadcast and the stream and initializing the AVFormatContext, and av_interleaved_write_frame returns 0. It seems like everything is OK.
    But on YouTube I see a short-lived indication of video stream quality, then it disappears, and there is never any video.

    So the question is: is it possible to establish a video stream to YouTube Live if you have only AVFrames that were encoded on some other device and you are missing some context information? If so, what am I missing?

    static AVFormatContext *fmt_context = NULL;
    static AVStream *video_stream;

    eLIVESTREAM_ERROR LIVESTREAM_Manager_Start(char* url) {
       eLIVESTREAM_ERROR error = LIVESTREAM_ERROR;
       av_register_all();
       avcodec_register_all();
       fmt_context = avformat_alloc_context();

       AVOutputFormat *ofmt = NULL;
       if (fmt_context != NULL) {
           ofmt = av_guess_format("flv", NULL, NULL);
           if (ofmt != NULL) {
               fmt_context->oformat = ofmt;

               video_stream = av_new_stream(fmt_context, 0);
               AVCodec *video_codec = avcodec_find_decoder(AV_CODEC_ID_H264);
               AVCodecContext *video_codec_ctx = video_stream->codec;
               video_codec_ctx->pix_fmt = PIX_FMT_YUV420P;
               video_codec_ctx->skip_frame = AVDISCARD_DEFAULT;
               video_codec_ctx->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
               video_codec_ctx->skip_loop_filter = AVDISCARD_DEFAULT;
               video_codec_ctx->workaround_bugs = FF_BUG_AUTODETECT;
               video_codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
               video_codec_ctx->codec_id = AV_CODEC_ID_H264;
               video_codec_ctx->skip_idct = AVDISCARD_DEFAULT;
               video_codec_ctx->time_base.num = 1;
               video_codec_ctx->time_base.den = 30;
               video_codec_ctx->width = 640;
               video_codec_ctx->height = 386;

               if(fmt_context->oformat->flags & AVFMT_GLOBALHEADER)  
                  video_codec_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;

               int codec_open_rslt = avcodec_open2(video_codec_ctx, video_codec, NULL);
               if (codec_open_rslt < 0) {
                   error = LIVESTREAM_ERROR;
               }

               if (!(ofmt->flags & AVFMT_NOFILE)) {
                   int open_rslt = avio_open(&fmt_context->pb, url, URL_WRONLY);
                   if (open_rslt == 0) {
                       int wrt_header_rslt = avformat_write_header(fmt_context, NULL);
                       if (wrt_header_rslt == 0) {
                           error = LIVESTREAM_OK;
                       } else {
                           avio_close(fmt_context->pb);
                       }
                   }
               }
           }
       }
       if (error != LIVESTREAM_OK) {
           if (ofmt != NULL) {
               av_free(ofmt);
           }
           if (fmt_context != NULL) {
               av_free(fmt_context);
           }
       }
       return error;
    }

    eLIVESTREAM_ERROR LIVESTREAM_Manager_Send (uint8_t *data , int size) {
       eLIVESTREAM_ERROR error = LIVESTREAM_OK;
       if (fmt_context == NULL || size <= 0 || data == NULL) {
           error = LIVESTREAM_ERROR;
       }
       if (error == LIVESTREAM_OK) {
           AVPacket pkt;
           av_init_packet(&pkt);
           pkt.stream_index = video_stream->index;
           pkt.data = data;
           pkt.size = size;
           pkt.pts = AV_NOPTS_VALUE;
           pkt.dts = AV_NOPTS_VALUE;
           pkt.duration = 0;
           pkt.pos = -1;
           int write_rslt = av_interleaved_write_frame(fmt_context, &pkt);
           if (write_rslt != 0) {
               error = LIVESTREAM_ERROR;
           }
       }
       return error;
    }

    void LIVESTREAM_Manager_Finish () {
       av_write_trailer(fmt_context);
       avio_close(fmt_context->pb);
       av_free(fmt_context);
       fmt_context = NULL;
    }