
Media (1)
-
La conservation du net art au musée. Les stratégies à l’œuvre
26 May 2011
Updated: July 2013
Language: French
Type: Text
Other articles (76)
-
Publishing on MediaSPIP
13 June 2013. Can I post content from an iPad tablet?
Yes, if your MediaSPIP installation is at version 0.2 or later. If in doubt, contact your MediaSPIP administrator to check. -
The plugin: Podcasts.
14 July 2010. Podcasting is, once again, a problem that reveals the standardization of data transport on the Internet.
Two interesting formats exist: the one developed by Apple, strongly tied to iTunes, whose spec is here; and the "Media RSS Module" format, which is more "open" and is notably backed by Yahoo and the Miro software.
File types supported in feeds
Apple's format only allows the following formats in its feeds: .mp3 audio/mpeg .m4a audio/x-m4a .mp4 (...) -
APPENDIX: Plugins used specifically for the farm
5 March 2010. The central/master site of the farm needs several additional plugins, beyond those of the channels, to work properly: the "Gestion de la mutualisation" plugin; the "inscription3" plugin, to manage registrations and requests to create a shared instance when users sign up; the "verifier" plugin, which provides a field-validation API (used by inscription3); the "champs extras v2" plugin, required by inscription3 (...)
On other sites (11613)
-
C++ ffmpeg lib version 7.0 - runtime error
1 September 2024, by Chris P
I want to make a C++ library named cppdub which will mimic the Python module pydub.


One main function is to export the AudioSegment to a file with a specific format (for example, mp3).


The code is:


void check_av_error(int error_code, const std::string& msg) {
 if (error_code < 0) {
 char errbuf[AV_ERROR_MAX_STRING_SIZE];
 av_strerror(error_code, errbuf, sizeof(errbuf));
 throw std::runtime_error(msg + ": " + errbuf);
 }
}

std::string av_err2str_(int errnum) {
 char buf[AV_ERROR_MAX_STRING_SIZE];
 av_strerror(errnum, buf, sizeof(buf));
 return std::string(buf);
}
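// (The av_err2str_ wrapper above exists because libavutil's av_err2str() macro
// uses a C compound literal, which is valid C but not valid C++.)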

void log_error(const std::string& msg) {
 std::cerr << "Error: " << msg << std::endl;
}

std::ofstream cppdub::AudioSegment::export_segment(
 std::string& out_f,
 const std::string& format,
 const std::string& codec,
 const std::string& bitrate,
 const std::vector<std::string>& parameters,
 const std::map<std::string, std::string>& tags,
 const std::string& id3v2_version,
 const std::string& cover) {

 av_log_set_level(AV_LOG_DEBUG);
 avformat_network_init();

 AVFormatContext* format_ctx = nullptr;
 int ret = avformat_alloc_output_context2(&format_ctx, nullptr, format.c_str(), out_f.c_str());
 check_av_error(ret, "Could not allocate format context");

 if (!(format_ctx->oformat->flags & AVFMT_NOFILE)) {
 ret = avio_open(&format_ctx->pb, out_f.c_str(), AVIO_FLAG_WRITE);
 check_av_error(ret, "Could not open output file");
 }

 AVStream* stream = avformat_new_stream(format_ctx, nullptr);
 if (!stream) {
 avformat_free_context(format_ctx);
 throw std::runtime_error("Could not allocate stream");
 }

 const AVCodec* codec_obj = avcodec_find_encoder_by_name(codec.c_str());
 if (!codec_obj) {
 avformat_free_context(format_ctx);
 throw std::runtime_error("Codec not found");
 }

 AVCodecContext* codec_ctx = avcodec_alloc_context3(codec_obj);
 if (!codec_ctx) {
 avformat_free_context(format_ctx);
 throw std::runtime_error("Could not allocate codec context");
 }

 codec_ctx->sample_rate = this->get_frame_rate();
 AVChannelLayout ch_layout_1;
 av_channel_layout_uninit(&ch_layout_1);
 av_channel_layout_default(&ch_layout_1, 2);
 codec_ctx->ch_layout = ch_layout_1; // Adjust based on your needs
 codec_ctx->bit_rate = std::stoi(bitrate);
 codec_ctx->sample_fmt = codec_obj->sample_fmts[0];

 if (format_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
 codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 }

 ret = avcodec_open2(codec_ctx, codec_obj, nullptr);
 check_av_error(ret, "Could not open codec");

 stream->time_base = { 1, codec_ctx->sample_rate };
 ret = avcodec_parameters_from_context(stream->codecpar, codec_ctx);
 check_av_error(ret, "Could not set codec parameters");

 ret = avformat_write_header(format_ctx, nullptr);
 check_av_error(ret, "Error occurred when writing header");

 AVPacket pkt;
 av_init_packet(&pkt);
 pkt.data = nullptr;
 pkt.size = 0;
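 // NOTE: av_init_packet() has long been deprecated; the modern pattern is
 // AVPacket* pkt = av_packet_alloc(); ... av_packet_free(&pkt);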

 int frame_size = av_samples_get_buffer_size(nullptr, codec_ctx->ch_layout.nb_channels,
 codec_ctx->frame_size, codec_ctx->sample_fmt, 0);
 check_av_error(frame_size, "Could not calculate frame size");
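 // NOTE: av_samples_get_buffer_size() returns a size in BYTES for one full
 // codec frame (codec_ctx->frame_size samples per channel), not a sample count.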

 AVFrame* frame = av_frame_alloc();
 if (!frame) {
 avcodec_free_context(&codec_ctx);
 avformat_free_context(format_ctx);
 throw std::runtime_error("Error allocating frame");
 }

 frame->format = codec_ctx->sample_fmt;
 frame->ch_layout = codec_ctx->ch_layout;
 frame->sample_rate = codec_ctx->sample_rate;
 frame->nb_samples = codec_ctx->frame_size;

 ret = av_frame_get_buffer(frame, 0);
 if (ret < 0) {
 av_frame_free(&frame);
 avcodec_free_context(&codec_ctx);
 avformat_free_context(format_ctx);
 throw std::runtime_error("Error allocating frame buffer: " + av_err2str_(ret));
 }

 size_t data_offset = 0;

 while (data_offset < this->raw_data().size()) {
 int samples_to_process = std::min(frame_size, static_cast<int>(this->raw_data().size()) - static_cast<int>(data_offset));

 // Fill the frame with audio data
 ret = avcodec_fill_audio_frame(frame, codec_ctx->ch_layout.nb_channels, codec_ctx->sample_fmt,
 reinterpret_cast<const uint8_t*>(this->raw_data().data()) + data_offset,
 samples_to_process, 0);
 if (ret < 0) {
 log_error("Error filling audio frame: " + av_err2str_(ret));
 av_frame_free(&frame);
 avcodec_free_context(&codec_ctx);
 avformat_free_context(format_ctx);
 throw std::runtime_error("Error filling audio frame: " + av_err2str_(ret));
 }

 data_offset += samples_to_process;

 ret = avcodec_send_frame(codec_ctx, frame);
 if (ret < 0) {
 log_error("Error sending frame for encoding: " + av_err2str_(ret));
 av_frame_free(&frame);
 avcodec_free_context(&codec_ctx);
 avformat_free_context(format_ctx);
 throw std::runtime_error("Error sending frame for encoding: " + av_err2str_(ret));
 }

 while (ret >= 0) {
 ret = avcodec_receive_packet(codec_ctx, &pkt);
 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
 break;
 }
 check_av_error(ret, "Error receiving packet");

 pkt.stream_index = stream->index;

 ret = av_interleaved_write_frame(format_ctx, &pkt);
 check_av_error(ret, "Error writing encoded frame to output file");

 av_packet_unref(&pkt);
 }
 }

 // Flush the encoder
 avcodec_send_frame(codec_ctx, nullptr);
 while (avcodec_receive_packet(codec_ctx, &pkt) == 0) {
 pkt.stream_index = stream->index;
 av_interleaved_write_frame(format_ctx, &pkt);
 av_packet_unref(&pkt);
 }

 av_write_trailer(format_ctx);

 av_frame_free(&frame);
 avcodec_free_context(&codec_ctx);
 avformat_free_context(format_ctx);

 return std::ofstream(out_f, std::ios::binary);
}


The runtime error is:


Exception thrown at 0x00007FF945137C9B (avcodec-61.dll) in cppdub_test.exe: 0xC0000005: Access violation reading location 0x0000024CBCD25080.


for the line:


ret = avcodec_send_frame(codec_ctx, frame);



Call stack:


avcodec-61.dll!00007ff945137c9b() Unknown
 avcodec-61.dll!00007ff9451381bb() Unknown
 avcodec-61.dll!00007ff945139679() Unknown
 avcodec-61.dll!00007ff94371521d() Unknown
 avcodec-61.dll!00007ff9434a80c2() Unknown
 avcodec-61.dll!00007ff9434a84a6() Unknown
 avcodec-61.dll!00007ff9434a8749() Unknown
> cppdub_test.exe!cppdub::AudioSegment::export_segment(std::string & out_f, const std::string & format, const std::string & codec, const std::string & bitrate, const std::vector<std::string> & parameters, const std::map<std::string,std::string> & tags, const std::string & id3v2_version, const std::string & cover) Line 572 C++
 cppdub_test.exe!main() Line 33 C++
 [External Code] 




Autos:


+ this 0x000000d3a08ff690 {data_="\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0... ...} cppdub::AudioSegment *
+ bitrate "128000" const std::string &
+ ch_layout_1 {order=AV_CHANNEL_ORDER_NATIVE (1) nb_channels=2 u={mask=3 map=0x0000000000000003 {id=??? name=... opaque=...} } ...} AVChannelLayout
+ codec "libmp3lame" const std::string &
+ codec_ctx 0x0000024cbc78c240 {av_class=avcodec-61.dll!0x00007ff94789c760 {class_name=0x00007ff94789c740 "AVCodecContext" ...} ...} AVCodecContext *
+ codec_obj avcodec-61.dll!0x00007ff9477fa4c0 (load symbols for additional information) {name=0x00007ff9477fa47c "libmp3lame" ...} const AVCodec *
+ cover "" const std::string &
 data_offset 9216 unsigned __int64
+ format "mp3" const std::string &
+ format_ctx 0x0000024cbc788a40 {av_class=avformat-61.dll!0x00007ff99eb09fe0 {class_name=0x00007ff99eb09fc0 "AVFormatContext" ...} ...} AVFormatContext *
+ frame 0x0000024cbc787380 {data=0x0000024cbc787380 {0x0000024cbcd25080 <error reading characters of string>, ...} ...} AVFrame *
 frame_size 9216 int
+ id3v2_version "4" const std::string &
+ out_f "ha-ha-ha.mp3" std::string &
+ parameters { size=0 } const std::vector<std::string> &
+ pkt {buf=0x0000000000000000 <null> pts=-9223372036854775808 dts=-9223372036854775808 ...} AVPacket
 ret 9216 int
 samples_to_process 9216 int
+ stream 0x0000024cbc789bc0 {av_class=avformat-61.dll!0x00007ff99eb09840 {class_name=0x00007ff99eb09820 "AVStream" ...} ...} AVStream *
+ tags { size=0 } const std::map<std::string,std::string> &
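A note on what the Autos window shows (my reading, not part of the original post): frame_size is 9216, which for libmp3lame (1152 samples per frame, apparently planar 32-bit samples, 2 channels) is the size in bytes of one full frame, yet the loop uses it as a per-iteration sample count, and avcodec_fill_audio_frame() repoints frame->data at the caller's buffer rather than the one allocated by av_frame_get_buffer(). If raw_data() returns by value, those pointers dangle immediately; even if it does not, the last chunk can hold fewer bytes than a full frame while frame->nb_samples still promises one, so the encoder reads past the end of the input. A minimal sketch of the byte/sample accounting, assuming raw_data() returns a reference to storage that outlives the loop and already holds audio in the encoder's sample format:

// Sketch only, not the original code: do the offset arithmetic in bytes and
// shrink the final frame so the encoder never reads past the input buffer.
// (Interleaved 16-bit input would additionally need libswresample to reach
// the planar format that libmp3lame expects.)
const int bytes_per_sample_set =
 av_get_bytes_per_sample(codec_ctx->sample_fmt) * codec_ctx->ch_layout.nb_channels;
const size_t bytes_left = this->raw_data().size() - data_offset;
const int nb_samples = std::min<int>(
 codec_ctx->frame_size, static_cast<int>(bytes_left / bytes_per_sample_set));
frame->nb_samples = nb_samples; // may be smaller than frame_size on the last chunk
ret = avcodec_fill_audio_frame(
 frame, codec_ctx->ch_layout.nb_channels, codec_ctx->sample_fmt,
 reinterpret_cast<const uint8_t*>(this->raw_data().data()) + data_offset,
 nb_samples * bytes_per_sample_set, 0);
data_offset += static_cast<size_t>(nb_samples) * bytes_per_sample_set;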


-
FFmpeg 6.0 won't work because the header files can't connect or interact with each other. How do I fix the files?
11 September 2023, by Señor Tonto
I am creating a Win32 API C++ application with Microsoft Visual Studio 2022 in a .sln file. I've gotten quite far with basic functionality and decided to try out video decoding and playing; I've practiced this before, but not with C++. So I looked around for good libraries and found FFmpeg. Of course, I thought this would be quite straightforward: just import the headers and code, right? No. Firstly, the FFmpeg I'm using is one of the pre-built binaries, 'ffmpeg-master-latest-win64-gpl-shared.zip' from the BtbN GitHub repository. I decompressed it with WinRAR and placed the files in Program Files (x86). Later on, I realised my mistake of placing a 64-bit build with the 32-bit programs, so I transferred it to Program Files. Even after this, though, my issue persists: the .h files of FFmpeg cannot communicate with each other. For some reason, they can't navigate to where the other header files are. I am quite sure this has something to do with where I saved the files, i.e. the directory. Nowhere on the FFmpeg website can I see where you're expected to put the files. I am sure there's a preset file path that is expected. Maybe I've named the FFmpeg folder incorrectly? Or maybe it's not meant to go in Program Files? The current directory for the FFmpeg folder for me is: C:\Program Files\FFmpeg. Below I will provide pictures of the errors I get where the code can't connect to other .h files, as well as the file path. I'll also provide my code.


#include <windows.h> //imports the main win32 API library
#include <tchar.h> //imports macros for handling Unicode & ASCII char sets
#include <commctrl.h> //defines the common control classes
#include <stdlib.h> //imports the standard C library
#include 
#include 
#include <iostream>
#include <fstream>
#include <string>

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
}

#pragma comment(lib, "Comctl32.lib") //tells the linker to include Comctl32.lib in the .exe
#pragma comment(lib, "C:\\Program Files\\FFmpeg\\lib\\avcodec.lib")
#pragma comment(lib, "C:\\Program Files\\FFmpeg\\lib\\avformat.lib")
#pragma comment(lib, "C:\\Program Files\\FFmpeg\\lib\\avutil.lib")
#pragma comment(lib, "C:\\Program Files\\FFmpeg\\lib\\swscale.lib")

#define EDIT_CONTROL 1
#define OPEN_FILE_BTN 2
#define SAVE_FILE_BTN 3
#define EMBOLDEN_BTN 4
#define ITALICISE_BTN 5
#define SCROLL_CONTAINER 6
#define FILEMENU_OPEN_FILE_BTN 7
#define ADD_ROW_BTN 8
#define CELL_1_ID 9
#define CELL_2_ID 10
#define CELL_3_ID 11
#define SCROLL_CONTAINER_TBL 12
// 946 0759 0609 | 163 739

HINSTANCE g_hInstance;
HWND g_hWndMain, g_hWndTabs, openFileBtn, saveFileBtn, hOpenFileEdit, hScrollContainer, hEditControl, tabHandle, emboldenBtn, italiciseBtn, FilemenuOpenFileBtn, tableContainer, tblHeaderOne,
tblHeaderTwo, tblHeaderThree, addRowBtn, hAddRowDialogue, hWnd, hWndCell1Label, hWndCell1Edit, hWndCell2Label, hWndCell2Edit, hWndCell3Label, hWndCell3Edit, hWndOkButton, hWndCancelButton, hRow, hCell1, hCell2, hCell3,
hWMP, OpenMp4Btn, hWMPContainer, hPlayBtn;
HMENU hMenu, hSubMenu;
bool isBold = false, isItalic = false;


const int startX = 0;
const int startY = 60;
const int ROW_HEIGHT = 20;
const int CELL_WIDTH = 110;
static int numRows = 1;
static int numCols = 3;

LRESULT CALLBACK WindowProcessMessages(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam);
INT_PTR CALLBACK AddRowDlgProc(HWND hwndDlg, UINT msg, WPARAM wParam, LPARAM lParam);

int WINAPI wWinMain(
 _In_ HINSTANCE currentInstance,
 _In_opt_ HINSTANCE previousInstance,
 _In_ LPWSTR cmdLine,
 _In_ int cmdCount)
{
 const wchar_t* CLASS_NAME = L"Windows App";
 WNDCLASS wc{};
 wc.hInstance = currentInstance;
 wc.lpszClassName = CLASS_NAME;
 wc.hCursor = LoadCursor(nullptr, IDC_ARROW);
 wc.hbrBackground = (HBRUSH)COLOR_WINDOW;
 wc.lpfnWndProc = WindowProcessMessages;
 RegisterClass(&wc);

 g_hWndMain = CreateWindow(CLASS_NAME, L"Windows App",
 WS_OVERLAPPEDWINDOW,
 CW_USEDEFAULT, CW_USEDEFAULT,
 800, 600,
 nullptr, nullptr, nullptr, nullptr);

 if (g_hWndMain == NULL) {
 return 0;
 }

 // Initialize common controls
 INITCOMMONCONTROLSEX icex;
 icex.dwSize = sizeof(INITCOMMONCONTROLSEX);
 icex.dwICC = ICC_TAB_CLASSES;
 InitCommonControlsEx(&icex);

 // Create tab control
 g_hWndTabs = CreateWindow(WC_TABCONTROL, L"",
 WS_CHILD | WS_CLIPSIBLINGS | WS_VISIBLE | TCS_SINGLELINE,
 0, 0, 800, 600,
 g_hWndMain, nullptr, currentInstance, nullptr);

 if (g_hWndTabs == NULL) {
 return 0;
 }

 // Add tabs to tab control, separate tab later
 TCITEM tcitem;
 tcitem.mask = TCIF_TEXT;

 wchar_t buf1[] = L"Table View";
 tcitem.pszText = buf1;
 TabCtrl_InsertItem(g_hWndTabs, 0, &tcitem);

 wchar_t buf2[] = L"Text Files";
 tcitem.pszText = buf2;
 TabCtrl_InsertItem(g_hWndTabs, 1, &tcitem);

 wchar_t buf3[] = L"mp4 Files";
 tcitem.pszText = buf3;
 TabCtrl_InsertItem(g_hWndTabs, 2, &tcitem);



 //original location of button initialisation


 ShowWindow(g_hWndMain, SW_SHOWDEFAULT);
 UpdateWindow(g_hWndMain);

 MSG msg{};
 while (GetMessage(&msg, nullptr, 0, 0)) {
 TranslateMessage(&msg);
 DispatchMessage(&msg);
 }
 return 0;
}



I severely doubt my code has anything to do with the issue, though; it's probably directory-related. I just placed wWinMain here to keep within the character limit.
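For what it's worth, a likely explanation (an assumption on my part, not confirmed in the post): FFmpeg's public headers include one another with paths like libavutil/avutil.h, so the compiler's include search path must point at the folder that contains the libavcodec, libavformat, libavutil and libswscale subdirectories, i.e. the include directory of the BtbN build, not the subdirectories themselves. With C:\Program Files\FFmpeg\include added under Project Properties > C/C++ > General > Additional Include Directories, a conventional include block like the sketch below compiles, and the #pragma comment(lib, ...) lines above link the matching import libraries.

#include <iostream>

// Sketch, assuming the BtbN archive layout (bin/, include/, lib/) was kept intact
// and C:\Program Files\FFmpeg\include was added to the include search path.
extern "C" // FFmpeg is a C library, so its headers need C linkage in C++
{
#include <libavcodec/avcodec.h> // resolved as <include dir>/libavcodec/avcodec.h
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
}

int main()
{
 // Smoke test that headers and import libraries are wired up correctly:
 // avcodec_version() is exported by avcodec-*.dll via avcodec.lib.
 std::cout << "libavcodec major version: " << (avcodec_version() >> 16) << '\n';
 return 0;
}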






-
Recording a video using MediaRecorder
21 July 2016, by Cédric Portmann
I am currently using the TextureFromCameraActivity from Grafika to record a video in square (1:1) resolution. To achieve this, I changed the GLES20.glViewport call so that the picture is moved to the top and appears square. Now I would like to record this square view using the MediaRecorder, or at least record the camera at normal resolution and then crop it using FFmpeg. However, I get the same error over and over again, and I can't figure out why.
The error I get:
start called in an invalid state: 4
And yes, I added all the necessary permissions:
android.permission.WRITE_EXTERNAL_STORAGE, android.permission.CAMERA,
android.permission.RECORD_VIDEO, android.permission.RECORD_AUDIO,
android.permission.STORAGE, android.permission.READ_EXTERNAL_STORAGE
Here is the modified code:
https://github.com/google/grafika
Thanks for your help :D
package com.android.grafika;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;
import android.app.Activity;
import android.widget.Toast;
import com.android.grafika.gles.Drawable2d;
import com.android.grafika.gles.EglCore;
import com.android.grafika.gles.GlUtil;
import com.android.grafika.gles.Sprite2d;
import com.android.grafika.gles.Texture2dProgram;
import com.android.grafika.gles.WindowSurface;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
public class TextureFromCameraActivity extends Activity implements View.OnClickListener, SurfaceHolder.Callback,
SeekBar.OnSeekBarChangeListener {
private static final int DEFAULT_ZOOM_PERCENT = 0; // 0-100
private static final int DEFAULT_SIZE_PERCENT = 80; // 0-100
private static final int DEFAULT_ROTATE_PERCENT = 75; // 0-100
// Requested values; actual may differ.
private static final int REQ_CAMERA_WIDTH = 720;
private static final int REQ_CAMERA_HEIGHT = 720;
private static final int REQ_CAMERA_FPS = 30;
// The holder for our SurfaceView. The Surface can outlive the Activity (e.g. when
// the screen is turned off and back on with the power button).
//
// This becomes non-null after the surfaceCreated() callback is called, and gets set
// to null when surfaceDestroyed() is called.
private static SurfaceHolder sSurfaceHolder;
// Thread that handles rendering and controls the camera. Started in onResume(),
// stopped in onPause().
private RenderThread mRenderThread;
// Receives messages from renderer thread.
private MainHandler mHandler;
// User controls.
private SeekBar mZoomBar;
private SeekBar mSizeBar;
private SeekBar mRotateBar;
// These values are passed to us by the camera/render thread, and displayed in the UI.
// We could also just peek at the values in the RenderThread object, but we'd need to
// synchronize access carefully.
private int mCameraPreviewWidth, mCameraPreviewHeight;
private float mCameraPreviewFps;
private int mRectWidth, mRectHeight;
private int mZoomWidth, mZoomHeight;
private int mRotateDeg;
SurfaceHolder sh;
MediaRecorder recorder;
SurfaceHolder holder;
boolean recording = false;
public static final String TAG = "VIDEOCAPTURE";
private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
recorder = new MediaRecorder();
setContentView(R.layout.activity_texture_from_camera);
mHandler = new MainHandler(this);
SurfaceView cameraView = (SurfaceView) findViewById(R.id.cameraOnTexture_surfaceView);
sh = cameraView.getHolder();
cameraView.setClickable(true);// make the surface view clickable
sh.addCallback(this);
//prepareRecorder();
mZoomBar = (SeekBar) findViewById(R.id.tfcZoom_seekbar);
mSizeBar = (SeekBar) findViewById(R.id.tfcSize_seekbar);
mRotateBar = (SeekBar) findViewById(R.id.tfcRotate_seekbar);
mZoomBar.setProgress(DEFAULT_ZOOM_PERCENT);
mSizeBar.setProgress(DEFAULT_SIZE_PERCENT);
mRotateBar.setProgress(DEFAULT_ROTATE_PERCENT);
mZoomBar.setOnSeekBarChangeListener(this);
mSizeBar.setOnSeekBarChangeListener(this);
mRotateBar.setOnSeekBarChangeListener(this);
Button record_btn = (Button)findViewById(R.id.button);
record_btn.setOnClickListener(this);
initRecorder();
updateControls();
}
@Override
protected void onResume() {
Log.d(TAG, "onResume BEGIN");
super.onResume();
mRenderThread = new RenderThread(mHandler);
mRenderThread.setName("TexFromCam Render");
mRenderThread.start();
mRenderThread.waitUntilReady();
RenderHandler rh = mRenderThread.getHandler();
rh.sendZoomValue(mZoomBar.getProgress());
rh.sendSizeValue(mSizeBar.getProgress());
rh.sendRotateValue(mRotateBar.getProgress());
if (sSurfaceHolder != null) {
Log.d(TAG, "Sending previous surface");
rh.sendSurfaceAvailable(sSurfaceHolder, false);
} else {
Log.d(TAG, "No previous surface");
}
Log.d(TAG, "onResume END");
}
@Override
protected void onPause() {
Log.d(TAG, "onPause BEGIN");
super.onPause();
RenderHandler rh = mRenderThread.getHandler();
rh.sendShutdown();
try {
mRenderThread.join();
} catch (InterruptedException ie) {
// not expected
throw new RuntimeException("join was interrupted", ie);
}
mRenderThread = null;
Log.d(TAG, "onPause END");
}
@Override // SurfaceHolder.Callback
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "surfaceCreated holder=" + holder + " (static=" + sSurfaceHolder + ")");
if (sSurfaceHolder != null) {
throw new RuntimeException("sSurfaceHolder is already set");
}
sSurfaceHolder = holder;
if (mRenderThread != null) {
// Normal case -- render thread is running, tell it about the new surface.
RenderHandler rh = mRenderThread.getHandler();
rh.sendSurfaceAvailable(holder, true);
} else {
// Sometimes see this on 4.4.x N5: power off, power on, unlock, with device in
// landscape and a lock screen that requires portrait. The surface-created
// message is showing up after onPause().
//
// Chances are good that the surface will be destroyed before the activity is
// unpaused, but we track it anyway. If the activity is un-paused and we start
// the RenderThread, the SurfaceHolder will be passed in right after the thread
// is created.
Log.d(TAG, "render thread not running");
}
recorder.setPreviewDisplay(holder.getSurface());
}
@Override // SurfaceHolder.Callback
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "surfaceChanged fmt=" + format + " size=" + width + "x" + height +
" holder=" + holder);
if (mRenderThread != null) {
RenderHandler rh = mRenderThread.getHandler();
rh.sendSurfaceChanged(format, width, height);
} else {
Log.d(TAG, "Ignoring surfaceChanged");
return;
}
}
@Override // SurfaceHolder.Callback
public void surfaceDestroyed(SurfaceHolder holder) {
// In theory we should tell the RenderThread that the surface has been destroyed.
if (mRenderThread != null) {
RenderHandler rh = mRenderThread.getHandler();
rh.sendSurfaceDestroyed();
}
Log.d(TAG, "surfaceDestroyed holder=" + holder);
sSurfaceHolder = null;
}
@Override // SeekBar.OnSeekBarChangeListener
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (mRenderThread == null) {
// Could happen if we programmatically update the values after setting a listener
// but before starting the thread. Also, easy to cause this by scrubbing the seek
// bar with one finger then tapping "recents" with another.
Log.w(TAG, "Ignoring onProgressChanged received w/o RT running");
return;
}
RenderHandler rh = mRenderThread.getHandler();
// "progress" ranges from 0 to 100
if (seekBar == mZoomBar) {
//Log.v(TAG, "zoom: " + progress);
rh.sendZoomValue(progress);
} else if (seekBar == mSizeBar) {
//Log.v(TAG, "size: " + progress);
rh.sendSizeValue(progress);
} else if (seekBar == mRotateBar) {
//Log.v(TAG, "rotate: " + progress);
rh.sendRotateValue(progress);
} else {
throw new RuntimeException("unknown seek bar");
}
// If we're getting preview frames quickly enough we don't really need this, but
// we don't want to have chunky-looking resize movement if the camera is slow.
// OTOH, if we get the updates too quickly (60fps camera?), this could jam us
// up and cause us to run behind. So use with caution.
rh.sendRedraw();
}
@Override // SeekBar.OnSeekBarChangeListener
public void onStartTrackingTouch(SeekBar seekBar) {}
@Override // SeekBar.OnSeekBarChangeListener
public void onStopTrackingTouch(SeekBar seekBar) {}
@Override
/**
* Handles any touch events that aren't grabbed by one of the controls.
*/
public boolean onTouchEvent(MotionEvent e) {
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_DOWN:
//Log.v(TAG, "onTouchEvent act=" + e.getAction() + " x=" + x + " y=" + y);
if (mRenderThread != null) {
RenderHandler rh = mRenderThread.getHandler();
rh.sendPosition((int) x, (int) y);
// Forcing a redraw can cause sluggish-looking behavior if the touch
// events arrive quickly.
//rh.sendRedraw();
}
break;
default:
break;
}
return true;
}
/**
* Updates the current state of the controls.
*/
private void updateControls() {
String str = getString(R.string.tfcCameraParams, mCameraPreviewWidth,
mCameraPreviewHeight, mCameraPreviewFps);
TextView tv = (TextView) findViewById(R.id.tfcCameraParams_text);
tv.setText(str);
str = getString(R.string.tfcRectSize, mRectWidth, mRectHeight);
tv = (TextView) findViewById(R.id.tfcRectSize_text);
tv.setText(str);
str = getString(R.string.tfcZoomArea, mZoomWidth, mZoomHeight);
tv = (TextView) findViewById(R.id.tfcZoomArea_text);
tv.setText(str);
}
@Override
public void onClick(View view) {
if (recording) {
recorder.stop();
recording = false;
// Let's initRecorder so we can record again
initRecorder();
prepareRecorder();
} else {
recording = true;
recorder.start();
}
}
private void initRecorder() {
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile cpHigh = CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
String path = Environment.getExternalStorageDirectory() + File.separator
+ Environment.DIRECTORY_DCIM + File.separator + "AlphaRun";
recorder.setOutputFile(path);
recorder.setMaxDuration(50000); // 50 seconds
recorder.setMaxFileSize(5000000); // Approximately 5 megabytes
}
private void prepareRecorder() {
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
/**
* Thread that handles all rendering and camera operations.
*/
private static class RenderThread extends Thread implements
SurfaceTexture.OnFrameAvailableListener {
// Object must be created on render thread to get correct Looper, but is used from
// UI thread, so we need to declare it volatile to ensure the UI thread sees a fully
// constructed object.
private volatile RenderHandler mHandler;
// Used to wait for the thread to start.
private Object mStartLock = new Object();
private boolean mReady = false;
private MainHandler mMainHandler;
private Camera mCamera;
private int mCameraPreviewWidth, mCameraPreviewHeight;
private EglCore mEglCore;
private WindowSurface mWindowSurface;
private int mWindowSurfaceWidth;
private int mWindowSurfaceHeight;
// Receives the output from the camera preview.
private SurfaceTexture mCameraTexture;
// Orthographic projection matrix.
private float[] mDisplayProjectionMatrix = new float[16];
private Texture2dProgram mTexProgram;
private final ScaledDrawable2d mRectDrawable =
new ScaledDrawable2d(Drawable2d.Prefab.RECTANGLE);
private final Sprite2d mRect = new Sprite2d(mRectDrawable);
private int mZoomPercent = DEFAULT_ZOOM_PERCENT;
private int mSizePercent = DEFAULT_SIZE_PERCENT;
private int mRotatePercent = DEFAULT_ROTATE_PERCENT;
private float mPosX, mPosY;
/**
* Constructor. Pass in the MainHandler, which allows us to send stuff back to the
* Activity.
*/
public RenderThread(MainHandler handler) {
mMainHandler = handler;
}
/**
* Thread entry point.
*/
@Override
public void run() {
Looper.prepare();
// We need to create the Handler before reporting ready.
mHandler = new RenderHandler(this);
synchronized (mStartLock) {
mReady = true;
mStartLock.notify(); // signal waitUntilReady()
}
// Prepare EGL and open the camera before we start handling messages.
mEglCore = new EglCore(null, 0);
openCamera(REQ_CAMERA_WIDTH, REQ_CAMERA_HEIGHT, REQ_CAMERA_FPS);
Looper.loop();
Log.d(TAG, "looper quit");
releaseCamera();
releaseGl();
mEglCore.release();
synchronized (mStartLock) {
mReady = false;
}
}
/**
* Waits until the render thread is ready to receive messages.
* <p>
* Call from the UI thread.
*/
public void waitUntilReady() {
synchronized (mStartLock) {
while (!mReady) {
try {
mStartLock.wait();
} catch (InterruptedException ie) { /* not expected */ }
}
}
}
/**
* Shuts everything down.
*/
private void shutdown() {
Log.d(TAG, "shutdown");
Looper.myLooper().quit();
}
/**
* Returns the render thread's Handler. This may be called from any thread.
*/
public RenderHandler getHandler() {
return mHandler;
}
/**
* Handles the surface-created callback from SurfaceView. Prepares GLES and the Surface.
*/
private void surfaceAvailable(SurfaceHolder holder, boolean newSurface) {
Surface surface = holder.getSurface();
mWindowSurface = new WindowSurface(mEglCore, surface, false);
mWindowSurface.makeCurrent();
// Create and configure the SurfaceTexture, which will receive frames from the
// camera. We set the textured rect's program to render from it.
mTexProgram = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT);
int textureId = mTexProgram.createTextureObject();
mCameraTexture = new SurfaceTexture(textureId);
mRect.setTexture(textureId);
if (!newSurface) {
// This Surface was established on a previous run, so no surfaceChanged()
// message is forthcoming. Finish the surface setup now.
//
// We could also just call this unconditionally, and perhaps do an unnecessary
// bit of reallocating if a surface-changed message arrives.
mWindowSurfaceWidth = mWindowSurface.getWidth();
mWindowSurfaceHeight = mWindowSurface.getWidth();
finishSurfaceSetup();
}
mCameraTexture.setOnFrameAvailableListener(this);
}
/**
* Releases most of the GL resources we currently hold (anything allocated by
* surfaceAvailable()).
* <p>
* Does not release EglCore.
*/
private void releaseGl() {
GlUtil.checkGlError("releaseGl start");
if (mWindowSurface != null) {
mWindowSurface.release();
mWindowSurface = null;
}
if (mTexProgram != null) {
mTexProgram.release();
mTexProgram = null;
}
GlUtil.checkGlError("releaseGl done");
mEglCore.makeNothingCurrent();
}
/**
* Handles the surfaceChanged message.
* <p>
* We always receive surfaceChanged() after surfaceCreated(), but surfaceAvailable()
* could also be called with a Surface created on a previous run. So this may not
* be called.
*/
private void surfaceChanged(int width, int height) {
Log.d(TAG, "RenderThread surfaceChanged " + width + "x" + height);
mWindowSurfaceWidth = width;
mWindowSurfaceHeight = width;
finishSurfaceSetup();
}
/**
* Handles the surfaceDestroyed message.
*/
private void surfaceDestroyed() {
// In practice this never appears to be called -- the activity is always paused
// before the surface is destroyed. In theory it could be called though.
Log.d(TAG, "RenderThread surfaceDestroyed");
releaseGl();
}
/**
* Sets up anything that depends on the window size.
* <p>
* Open the camera (to set mCameraAspectRatio) before calling here.
*/
private void finishSurfaceSetup() {
int width = mWindowSurfaceWidth;
int height = mWindowSurfaceHeight;
Log.d(TAG, "finishSurfaceSetup size=" + width + "x" + height +
" camera=" + mCameraPreviewWidth + "x" + mCameraPreviewHeight);
// Use full window.
GLES20.glViewport(0, 700, width, height);
// Simple orthographic projection, with (0,0) in lower-left corner.
Matrix.orthoM(mDisplayProjectionMatrix, 0, 0, width, 0, height, -1, 1);
// Default position is center of screen.
mPosX = width / 2.0f;
mPosY = height / 2.0f;
updateGeometry();
// Ready to go, start the camera.
Log.d(TAG, "starting camera preview");
try {
mCamera.setPreviewTexture(mCameraTexture);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
mCamera.startPreview();
}
/**
* Updates the geometry of mRect, based on the size of the window and the current
* values set by the UI.
*/
private void updateGeometry() {
int width = mWindowSurfaceWidth;
int height = mWindowSurfaceHeight;
int smallDim = Math.min(width, height);
// Max scale is a bit larger than the screen, so we can show over-size.
float scaled = smallDim * (mSizePercent / 100.0f) * 1.25f;
float cameraAspect = (float) mCameraPreviewWidth / mCameraPreviewHeight;
int newWidth = Math.round(scaled * cameraAspect);
int newHeight = Math.round(scaled);
float zoomFactor = 1.0f - (mZoomPercent / 100.0f);
int rotAngle = Math.round(360 * (mRotatePercent / 100.0f));
mRect.setScale(newWidth, newHeight);
mRect.setPosition(mPosX, mPosY);
mRect.setRotation(rotAngle);
mRectDrawable.setScale(zoomFactor);
mMainHandler.sendRectSize(newWidth, newHeight);
mMainHandler.sendZoomArea(Math.round(mCameraPreviewWidth * zoomFactor),
Math.round(mCameraPreviewHeight * zoomFactor));
mMainHandler.sendRotateDeg(rotAngle);
}
@Override // SurfaceTexture.OnFrameAvailableListener; runs on arbitrary thread
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
mHandler.sendFrameAvailable();
}
/**
* Handles incoming frame of data from the camera.
*/
private void frameAvailable() {
mCameraTexture.updateTexImage();
draw();
}
/**
* Draws the scene and submits the buffer.
*/
private void draw() {
GlUtil.checkGlError("draw start");
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mRect.draw(mTexProgram, mDisplayProjectionMatrix);
mWindowSurface.swapBuffers();
GlUtil.checkGlError("draw done");
}
/**
* Opens a camera, and attempts to establish preview mode at the specified width
* and height with a fixed frame rate.
* <p>
* Sets mCameraPreviewWidth / mCameraPreviewHeight.
*/
private void openCamera(int desiredWidth, int desiredHeight, int desiredFps) {
if (mCamera != null) {
throw new RuntimeException("camera already initialized");
}
Camera.CameraInfo info = new Camera.CameraInfo();
// Try to find a front-facing camera (e.g. for videoconferencing).
int numCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
mCamera = Camera.open(i);
break;
}
}
if (mCamera == null) {
Log.d(TAG, "No front-facing camera found; opening default");
mCamera = Camera.open(); // opens first back-facing camera
}
if (mCamera == null) {
throw new RuntimeException("Unable to open camera");
}
Camera.Parameters parms = mCamera.getParameters();
CameraUtils.choosePreviewSize(parms, desiredWidth, desiredHeight);
parms.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
// Try to set the frame rate to a constant value.
int thousandFps = CameraUtils.chooseFixedPreviewFps(parms, desiredFps * 1000);
// Give the camera a hint that we're recording video. This can have a big
// impact on frame rate.
parms.setRecordingHint(true);
mCamera.setParameters(parms);
int[] fpsRange = new int[2];
Camera.Size mCameraPreviewSize = parms.getPreviewSize();
parms.getPreviewFpsRange(fpsRange);
String previewFacts = mCameraPreviewSize.width + "x" + mCameraPreviewSize.height;
if (fpsRange[0] == fpsRange[1]) {
previewFacts += " @" + (fpsRange[0] / 1000.0) + "fps";
} else {
previewFacts += " @[" + (fpsRange[0] / 1000.0) +
" - " + (fpsRange[1] / 1000.0) + "] fps";
}
Log.i(TAG, "Camera config: " + previewFacts);
mCameraPreviewWidth = mCameraPreviewSize.width;
mCameraPreviewHeight = mCameraPreviewSize.height;
mMainHandler.sendCameraParams(mCameraPreviewWidth, mCameraPreviewHeight,
thousandFps / 1000.0f);
}
/**
* Stops camera preview, and releases the camera to the system.
*/
private void releaseCamera() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
Log.d(TAG, "releaseCamera -- done");
}
}
}
}