
Recherche avancée
Médias (2)
-
Granite de l’Aber Ildut
9 septembre 2011, par
Mis à jour : Septembre 2011
Langue : français
Type : Texte
-
Géodiversité
9 septembre 2011, par ,
Mis à jour : Août 2018
Langue : français
Type : Texte
Autres articles (59)
-
List of compatible distributions
26 avril 2011, par — The table below is the list of Linux distributions compatible with the automated installation script of MediaSPIP. Distribution name | Version name | Version number : Debian Squeeze 6.x.x ; Debian Wheezy 7.x.x ; Debian Jessie 8.x.x ; Ubuntu The Precise Pangolin 12.04 LTS ; Ubuntu The Trusty Tahr 14.04
If you want to help us improve this list, you can provide us access to a machine whose distribution is not mentioned above or send the necessary fixes to add (...) -
MediaSPIP v0.2
21 juin 2013, par — MediaSPIP 0.2 est la première version de MediaSPIP stable.
Sa date de sortie officielle est le 21 juin 2013 et est annoncée ici.
Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
Comme pour la version précédente, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...) -
Mise à disposition des fichiers
14 avril 2011, par — Par défaut, lors de son initialisation, MediaSPIP ne permet pas aux visiteurs de télécharger les fichiers qu’ils soient originaux ou le résultat de leur transformation ou encodage. Il permet uniquement de les visualiser.
Cependant, il est possible et facile d’autoriser les visiteurs à avoir accès à ces documents et ce sous différentes formes.
Tout cela se passe dans la page de configuration du squelette. Il vous faut aller dans l’espace d’administration du canal, et choisir dans la navigation (...)
Sur d’autres sites (6336)
-
Revision 43464 : fournir la fonction inc_yaml_to_array_dist necessaire a la boucle DATA
9 janvier 2011, par fil@… — Log : fournir la fonction inc_yaml_to_array_dist necessaire a la boucle DATA
-
Android screenshot raw data conversion
30 juin 2014, par user3607576 — I’m trying to take screenshots of an Android rooted device. I managed to access the framebuffer and get the raw data. Now I have to use the ffmpeg command to convert it to a png format, like so :
ffmpeg -vcodec rawvideo -f rawvideo -pix_fmt rgb32 -s 240x320 -i fb0 -f image2 -vcodec png image%d.png
My question is, where is the output png image ? Where is it saved ? -
How to get picture buffer data in ffmpeg ?
5 juin 2014, par 4ntoine — I’m trying to pass bitmap from ffmpeg to android.
It already works but it’s displaying picture right on surface passed from java to native code.
How can I get frame buffer bitmap data to pass it to java ? I’ve tried to save out_frame buffer data :
unsigned char bmpFileHeader[14] = {'B', 'M', 0,0,0,0, 0,0, 0,0, 54, 0,0,0};
unsigned char bmpInfoHeader[40] = {40,0,0,0, 0,0,0,0, 0,0,0,0, 1,0, 24,0};
unsigned char bmpPad[3] = {0, 0, 0};
void saveBuffer(int fileIndex, int width, int height, unsigned char *buffer, int buffer_size) {
unsigned char filename[1024];
sprintf(filename, "/storage/sdcard0/3d_player_%d.bmp", fileIndex);
LOGI(10, "saving ffmpeg bitmap file: %d to %s", fileIndex, filename);
FILE *bitmapFile = fopen(filename, "wb");
if (!bitmapFile) {
LOGE(10, "failed to create ffmpeg bitmap file");
return;
}
unsigned char filesize = 54 + 3 * width * height; // 3 = (r,g,b)
bmpFileHeader[2] = (unsigned char)(filesize);
bmpFileHeader[3] = (unsigned char)(filesize >> 8);
bmpFileHeader[4] = (unsigned char)(filesize >> 16);
bmpFileHeader[5] = (unsigned char)(filesize >> 24);
bmpInfoHeader[4] = (unsigned char)(width);
bmpInfoHeader[5] = (unsigned char)(width >> 8);
bmpInfoHeader[6] = (unsigned char)(width >> 16);
bmpInfoHeader[7] = (unsigned char)(width >> 24);
bmpInfoHeader[8] = (unsigned char)(height);
bmpInfoHeader[9] = (unsigned char)(height >> 8);
bmpInfoHeader[10] = (unsigned char)(height >> 16);
bmpInfoHeader[11] = (unsigned char)(height >> 24);
fwrite(bmpFileHeader, 1, 14, bitmapFile);
fwrite(bmpInfoHeader, 1, 40, bitmapFile);
int i;
for (i=0; iplayer;
int stream_no = decoder_data->stream_no;
AVCodecContext * ctx = player->input_codec_ctxs[stream_no];
AVFrame * frame = player->input_frames[stream_no];
AVStream * stream = player->input_streams[stream_no];
int interrupt_ret;
int to_write;
int err = 0;
AVFrame *rgb_frame = player->rgb_frame;
ANativeWindow_Buffer buffer;
ANativeWindow * window;
#ifdef MEASURE_TIME
struct timespec timespec1, timespec2, diff;
#endif // MEASURE_TIME
LOGI(10, "player_decode_video decoding");
int frameFinished;
#ifdef MEASURE_TIME
clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timespec1);
#endif // MEASURE_TIME
int ret = avcodec_decode_video2(ctx, frame, &frameFinished,
packet_data->packet);
#ifdef MEASURE_TIME
clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timespec2);
diff = timespec_diff(timespec1, timespec2);
LOGI(3, "decode_video timediff: %d.%9ld", diff.tv_sec, diff.tv_nsec);
#endif // MEASURE_TIME
if (ret < 0) {
LOGE(1, "player_decode_video Fail decoding video %d\n", ret);
return -ERROR_WHILE_DECODING_VIDEO;
}
if (!frameFinished) {
LOGI(10, "player_decode_video Video frame not finished\n");
return 0;
}
// saving in buffer converted video frame
LOGI(7, "player_decode_video copy wait");
#ifdef MEASURE_TIME
clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timespec1);
#endif // MEASURE_TIME
pthread_mutex_lock(&player->mutex_queue);
window = player->window;
if (window == NULL) {
pthread_mutex_unlock(&player->mutex_queue);
goto skip_frame;
}
ANativeWindow_setBuffersGeometry(window, ctx->width, ctx->height,
WINDOW_FORMAT_RGBA_8888);
if (ANativeWindow_lock(window, &buffer, NULL) != 0) {
pthread_mutex_unlock(&player->mutex_queue);
goto skip_frame;
}
pthread_mutex_unlock(&player->mutex_queue);
int format = buffer.format;
if (format < 0) {
LOGE(1, "Could not get window format")
}
enum PixelFormat out_format;
if (format == WINDOW_FORMAT_RGBA_8888) {
out_format = PIX_FMT_RGBA;
LOGI(6, "Format: WINDOW_FORMAT_RGBA_8888");
} else if (format == WINDOW_FORMAT_RGBX_8888) {
out_format = PIX_FMT_RGB0;
LOGE(1, "Format: WINDOW_FORMAT_RGBX_8888 (not supported)");
} else if (format == WINDOW_FORMAT_RGB_565) {
out_format = PIX_FMT_RGB565;
LOGE(1, "Format: WINDOW_FORMAT_RGB_565 (not supported)");
} else {
LOGE(1, "Unknown window format");
}
avpicture_fill((AVPicture *) rgb_frame, buffer.bits, out_format,
buffer.width, buffer.height);
rgb_frame->data[0] = buffer.bits;
if (format == WINDOW_FORMAT_RGBA_8888) {
rgb_frame->linesize[0] = buffer.stride * 4;
} else {
LOGE(1, "Unknown window format");
}
LOGI(6,
"Buffer: width: %d, height: %d, stride: %d",
buffer.width, buffer.height, buffer.stride);
int i = 0;
#ifdef MEASURE_TIME
clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timespec2);
diff = timespec_diff(timespec1, timespec2);
LOGI(1,
"lockPixels and fillimage timediff: %d.%9ld", diff.tv_sec, diff.tv_nsec);
#endif // MEASURE_TIME
#ifdef MEASURE_TIME
clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &timespec1);
#endif // MEASURE_TIME
LOGI(7, "player_decode_video copying...");
AVFrame * out_frame;
int rescale;
if (ctx->width == buffer.width && ctx->height == buffer.height) {
// This always should be true
out_frame = rgb_frame;
rescale = FALSE;
} else {
out_frame = player->tmp_frame2;
rescale = TRUE;
}
if (ctx->pix_fmt == PIX_FMT_YUV420P) {
__I420ToARGB(frame->data[0], frame->linesize[0], frame->data[2],
frame->linesize[2], frame->data[1], frame->linesize[1],
out_frame->data[0], out_frame->linesize[0], ctx->width,
ctx->height);
} else if (ctx->pix_fmt == PIX_FMT_NV12) {
__NV21ToARGB(frame->data[0], frame->linesize[0], frame->data[1],
frame->linesize[1], out_frame->data[0], out_frame->linesize[0],
ctx->width, ctx->height);
} else {
LOGI(3, "Using slow conversion: %d ", ctx->pix_fmt);
struct SwsContext *sws_context = player->sws_context;
sws_context = sws_getCachedContext(sws_context, ctx->width, ctx->height,
ctx->pix_fmt, ctx->width, ctx->height, out_format,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
player->sws_context = sws_context;
if (sws_context == NULL) {
LOGE(1, "could not initialize conversion context from: %d"
", to :%d\n", ctx->pix_fmt, out_format);
// TODO some error
}
sws_scale(sws_context, (const uint8_t * const *) frame->data,
frame->linesize, 0, ctx->height, out_frame->data,
out_frame->linesize);
}
if (rescale) {
// Never occurs
__ARGBScale(out_frame->data[0], out_frame->linesize[0], ctx->width,
ctx->height, rgb_frame->data[0], rgb_frame->linesize[0],
buffer.width, buffer.height, __kFilterNone);
out_frame = rgb_frame;
}
// TODO: (4ntoine) frame decoded and rescaled, ready to call callback with frame picture from buffer
int bufferSize = buffer.width * buffer.height * 3; // 3 = (r,g,b);
static int bitmapCounter = 0;
if (bitmapCounter < 10) {
saveBuffer(bitmapCounter++, buffer.width, buffer.height, (unsigned char *)out_frame->data, bufferSize);
}
but out_frame is empty and the file has a header and a body of 0x00 bytes.
How to get picture buffer data in ffmpeg ?