
Other articles (103)
-
Customizing by adding your logo, banner or background image
5 September 2013 — Some themes support three customization elements: adding a logo; adding a banner; adding a background image.
-
Custom menus
14 November 2010 — MediaSPIP uses the Menus plugin to manage several configurable navigation menus.
This lets channel administrators configure these menus in detail.
Menus created when the site is initialized
By default, three menus are created automatically when the site is initialized: the main menu; identifier: barrenav; this menu is generally inserted at the top of the page after the header block, and its identifier makes it compatible with skeletons based on Zpip; (...) -
Organizing by category
17 May 2013 — In MediaSPIP, a rubrique has two names: category and rubrique.
The various documents stored in MediaSPIP can be filed under different categories. You can create a category by clicking on "publish a category" in the "publish" menu at the top right (after logging in). A category can also be placed inside another category, which makes it possible to build a hierarchy of categories.
The next time a document is published, the newly created category will be offered (...)
On other sites (16329)
-
How to simultaneously capture the mic, stream it to an RTSP server and play it on the iPhone's speaker?
24 August 2021, by Norbert Towiański — I want to capture sound from the mic, stream it to an RTSP server, and play it back simultaneously on the iPhone's speaker after getting the samples from the RTSP server; a loop of sorts. I use FFmpegKit and I want to use MobileVLCKit, but unfortunately the microphone turns off when I start playing the stream.
I think I've done the first step (capturing from the microphone and sending an OutputStream to the RTSP server):


@IBAction func transmitBtnPressed(_ sender: Any) {
    ffmpeg_transmit()
}

@IBAction func recordBtnPressed(_ sender: Any) {
    switch recordingState {
    case .idle:
        recordingState = .start
        startRecording()
        recordBtn.setTitle("Started", for: .normal)
        let urlToFile = URL(fileURLWithPath: outPipePath!)
        outputStream = OutputStream(url: urlToFile, append: false)
        outputStream!.open()
    case .capturing:
        recordingState = .end
        stopRecording()
        recordBtn.setTitle("End", for: .normal)
    default:
        break
    }
}

override func viewDidLoad() {
    super.viewDidLoad()
    outPipePath = FFmpegKitConfig.registerNewFFmpegPipe()
    self.setup()
}

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    setUpAuthStatus()
}

func setUpAuthStatus() {
    if AVCaptureDevice.authorizationStatus(for: AVMediaType.audio) != .authorized {
        AVCaptureDevice.requestAccess(for: AVMediaType.audio, completionHandler: { (authorized) in
            DispatchQueue.main.async {
                if authorized {
                    self.setup()
                }
            }
        })
    }
}

func setup() {
    self.session.sessionPreset = AVCaptureSession.Preset.high

    self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.m4a")
    if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
        _ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
    }

    self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
                                          fileType: AVFileType.m4a)
    self.assetWriter!.movieFragmentInterval = CMTime.invalid
    self.assetWriter!.shouldOptimizeForNetworkUse = true

    // 16-bit little-endian mono PCM at 48 kHz, matching the ffmpeg input format below.
    let audioSettings = [
        AVFormatIDKey: kAudioFormatLinearPCM,
        AVSampleRateKey: 48000.0,
        AVNumberOfChannelsKey: 1,
        AVLinearPCMIsFloatKey: false,
        AVLinearPCMBitDepthKey: 16,
        AVLinearPCMIsBigEndianKey: false,
        AVLinearPCMIsNonInterleaved: false,
    ] as [String: Any]

    self.audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,
                                         outputSettings: audioSettings)
    self.audioInput?.expectsMediaDataInRealTime = true

    if self.assetWriter!.canAdd(self.audioInput!) {
        self.assetWriter?.add(self.audioInput!)
    }

    self.session.startRunning()

    DispatchQueue.main.async {
        self.session.beginConfiguration()
        self.session.commitConfiguration()

        let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
        let audioIn = try? AVCaptureDeviceInput(device: audioDevice!)

        if self.session.canAddInput(audioIn!) {
            self.session.addInput(audioIn!)
        }

        if self.session.canAddOutput(self.audioOutput) {
            self.session.addOutput(self.audioOutput)
        }

        self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
    }
}

func startRecording() {
    if self.assetWriter?.startWriting() != true {
        print("error: \(self.assetWriter?.error.debugDescription ?? "")")
    }

    self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
}

func stopRecording() {
    self.audioOutput.setSampleBufferDelegate(nil, queue: nil)

    self.assetWriter?.finishWriting {
        print("Saved in folder \(self.recordingURL!)")
    }
}

func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if !self.isRecordingSessionStarted {
        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        self.assetWriter?.startSession(atSourceTime: presentationTime)
        self.isRecordingSessionStarted = true
        recordingState = .capturing
    }

    // Extract the raw PCM payload from the sample buffer and write it to the ffmpeg pipe.
    var blockBuffer: CMBlockBuffer?
    var audioBufferList: AudioBufferList = AudioBufferList.init()

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, bufferListSizeNeededOut: nil, bufferListOut: &audioBufferList, bufferListSize: MemoryLayout<AudioBufferList>.size, blockBufferAllocator: nil, blockBufferMemoryAllocator: nil, flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, blockBufferOut: &blockBuffer)
    let buffers = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in buffers {
        let u8ptr = buffer.mData!.assumingMemoryBound(to: UInt8.self)
        let output = outputStream!.write(u8ptr, maxLength: Int(buffer.mDataByteSize))

        if output == -1 {
            let error = outputStream?.streamError
            print("\(#file) > \(#function) > Error on outputStream: \(error!.localizedDescription)")
        } else {
            print("\(#file) > \(#function) > Data sent")
        }
    }
}

func ffmpeg_transmit() {
    // Read s16le/48 kHz/mono PCM from the pipe and push it to the RTSP server as low-delay Opus.
    let cmd1: String = "-f s16le -ar 48000 -ac 1 -i "
    let cmd2: String = " -probesize 32 -analyzeduration 0 -c:a libopus -application lowdelay -ac 1 -ar 48000 -f rtsp -rtsp_transport udp rtsp://localhost:18556/mystream"
    let cmd = cmd1 + outPipePath! + cmd2

    print(cmd)

    ffmpegSession = FFmpegKit.executeAsync(cmd, withExecuteCallback: { ffmpegSession in
        let state = ffmpegSession?.getState()
        let returnCode = ffmpegSession?.getReturnCode()
        if let returnCode = returnCode, let get = ffmpegSession?.getFailStackTrace() {
            print("FFmpeg process exited with state \(String(describing: FFmpegKitConfig.sessionState(toString: state!))) and rc \(returnCode).\(get)")
        }
    }, withLogCallback: { log in

    }, withStatisticsCallback: { statistics in

    })
}


I want to use MobileVLCKit this way:


func startStream() {
    guard let url = URL(string: "rtsp://localhost:18556/mystream") else { return }
    audioPlayer!.media = VLCMedia(url: url)

    audioPlayer!.media.addOption("-vv")
    audioPlayer!.media.addOption("--network-caching=10000")

    audioPlayer!.delegate = self
    audioPlayer!.audio.volume = 100

    audioPlayer!.play()
}



Could you give me some hints on how to implement this?
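One detail worth checking first (an assumption on my part, since the session setup isn't shown above): on iOS the shared AVAudioSession decides whether capture and playback may coexist. Unless the session is in the .playAndRecord category, starting playback can deactivate the microphone, which matches the symptom described. A minimal sketch, where configureAudioSession is a hypothetical helper name:

import AVFoundation

// Sketch: put the shared audio session into a mode that allows recording
// and playback at the same time, routed to the loudspeaker.
func configureAudioSession() throws {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.playAndRecord,
                            mode: .voiceChat,
                            options: [.defaultToSpeaker, .allowBluetooth])
    try session.setActive(true)
}

Calling something like this once before startRecording() and startStream() would be my first experiment. Two smaller hedged observations: MobileVLCKit media options usually take the ":network-caching=10000" form rather than "--network-caching=10000", and 10000 ms of caching by itself adds roughly ten seconds of loop latency.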


-
How to trim video in Android after camera capture?
2 July 2017, by Sathwik Gangisetty — I have an app that captures images and videos. I'm able to implement picture cropping, but I cannot implement video trimming. I tried using mp4parser and ffmpeg, but I cannot make them work. Can anyone suggest a tutorial, or have a look at my MainActivity.java code and suggest what to do? I even tried K4LVideoTrimmer but couldn't make it work, as the GitHub documentation is not clear.
Here is my MainActivity.java file.
package com.example.sathwik.uploadtrail1;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

import com.yalantis.ucrop.UCrop;
import life.knowledge4.videotrimmer.K4LVideoTrimmer;

import static android.R.attr.path;

public class MainActivity extends AppCompatActivity {
    // LogCat tag
    private static final String TAG = MainActivity.class.getSimpleName();

    // Camera activity request codes
    private static final int CAMERA_CAPTURE_IMAGE_REQUEST_CODE = 100;
    private static final int CAMERA_CAPTURE_VIDEO_REQUEST_CODE = 200;
    public static final int MEDIA_TYPE_IMAGE = 1;
    public static final int MEDIA_TYPE_VIDEO = 2;
    final int PIC_CROP = 3;
    final int VIDEO_TRIM = 4;

    private File outputFile = null;
    // public static final int REQUEST_CROP = UCrop.REQUEST_CROP;
    private Uri fileUri; // file url to store image/video
    // private Uri fileUriCrop;
    private Button btnCapturePicture, btnRecordVideo;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Changing action bar background color
        //getActionBar().setBackgroundDrawable(new ColorDrawable(Color.parseColor(getResources().getString(R.color.action_bar))));

        btnCapturePicture = (Button) findViewById(R.id.btnCapturePicture);
        btnRecordVideo = (Button) findViewById(R.id.btnRecordVideo);

        /** Capture image button click event */
        btnCapturePicture.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                captureImage();
            }
        });

        /** Record video button click event */
        btnRecordVideo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // record video
                recordVideo();
            }
        });

        // Checking camera availability
        if (!isDeviceSupportCamera()) {
            Toast.makeText(getApplicationContext(),
                    "Sorry! Your device doesn't support camera",
                    Toast.LENGTH_LONG).show();
            // will close the app if the device doesn't have a camera
            finish();
        }
    }

    /** Checking whether the device has camera hardware or not */
    private boolean isDeviceSupportCamera() {
        if (getApplicationContext().getPackageManager().hasSystemFeature(
                PackageManager.FEATURE_CAMERA)) {
            // this device has a camera
            return true;
        } else {
            // no camera on this device
            return false;
        }
    }

    /** Launching camera app to capture image */
    private void captureImage() {
        Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
        // start the image capture Intent
        startActivityForResult(intent, CAMERA_CAPTURE_IMAGE_REQUEST_CODE);
    }

    /** Launching camera app to record video */
    private void recordVideo() {
        Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
        fileUri = getOutputMediaFileUri(MEDIA_TYPE_VIDEO);
        // set video quality
        intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri); // set the output file name
        // start the video capture Intent
        startActivityForResult(intent, CAMERA_CAPTURE_VIDEO_REQUEST_CODE);
    }

    /* *//** Perform UCrop *//*
    public void performUcrop() {
        UCrop.of(fileUri, fileUri).start(this);
    }*/

    private void performcrop() {
        // UCrop.of(fileUri, fileUri).start(this);
        Intent cropIntent = new Intent("com.android.camera.action.CROP");
        cropIntent.setDataAndType(fileUri, "image/*");
        cropIntent.putExtra("crop", "true");
        cropIntent.putExtra("aspectX", 1);
        cropIntent.putExtra("aspectY", 1);
        cropIntent.putExtra("outputX", 256);
        cropIntent.putExtra("outputY", 256);
        cropIntent.putExtra("return-data", true);
        //cropIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(outputFile));
        startActivityForResult(cropIntent, PIC_CROP);
    }

    /** Here we store the file url as it will be null after returning from the camera app */
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // save file url in bundle as it will be null on screen orientation changes
        outState.putParcelable("file_uri", fileUri);
    }

    @Override
    protected void onRestoreInstanceState(@NonNull Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        // get the file url
        fileUri = savedInstanceState.getParcelable("file_uri");
    }

    /** Receiving activity result method will be called after closing the camera */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // if the result is capturing Image
        if (requestCode == CAMERA_CAPTURE_IMAGE_REQUEST_CODE) {
            if (resultCode == RESULT_OK) {
                // successfully captured the image
                //Log.d("ucrop", "error log" + fileUri);
                // performUcrop();
                //final Uri fileUri = UCrop.getOutput(data);
                //fileUri = data.getData();
                performcrop();
                // launching upload activity
            } else if (resultCode == RESULT_CANCELED) {
                // user cancelled Image capture
                Toast.makeText(getApplicationContext(),
                        "Sorry! Failed to capture image", Toast.LENGTH_SHORT).show();
            }
        } else if (requestCode == CAMERA_CAPTURE_VIDEO_REQUEST_CODE) {
            if (resultCode == RESULT_OK) {
                // video successfully recorded
                //launching upload activity
                //videotrim();
                launchUploadActivity(false);
            } else if (resultCode == RESULT_CANCELED) {
                // user cancelled recording
                Toast.makeText(getApplicationContext(), "Sorry! Failed to record video", Toast.LENGTH_SHORT).show();
            }
        } else if (requestCode == PIC_CROP) {
            if (resultCode == RESULT_OK) {
                //fileUri = Uri.fromFile(outputFile);
                launchUploadActivity(true);
            }
        }
    }

    private void launchUploadActivity(boolean isImage) {
        Intent i = new Intent(MainActivity.this, UploadActivity.class);
        i.putExtra("filePath", fileUri.getPath());
        i.putExtra("isImage", isImage);
        startActivity(i);
    }

    /** Creating file uri to store image/video */
    public Uri getOutputMediaFileUri(int type) {
        return Uri.fromFile(getOutputMediaFile(type));
    }

    /** Returning image / video file */
    private static File getOutputMediaFile(int type) {
        // External sdcard location
        File mediaStorageDir = new File(
                Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                Config.IMAGE_DIRECTORY_NAME);
        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d(TAG, "Oops! Failed to create " + Config.IMAGE_DIRECTORY_NAME + " directory");
                return null;
            }
        }
        // Create a media file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
        File mediaFile;
        if (type == MEDIA_TYPE_IMAGE) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".jpg");
        } else if (type == MEDIA_TYPE_VIDEO) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator + "VID_" + timeStamp + ".mp4");
        } else {
            return null;
        }
        return mediaFile;
    }

    // Logout function
    private void logout() {
        // Creating an alert dialog to confirm logout
        android.support.v7.app.AlertDialog.Builder alertDialogBuilder = new android.support.v7.app.AlertDialog.Builder(this);
        alertDialogBuilder.setMessage("Are you sure you want to logout?");
        alertDialogBuilder.setPositiveButton("Yes",
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface arg0, int arg1) {
                        // Getting our shared preferences
                        SharedPreferences preferences = getSharedPreferences(Config.SHARED_PREF_NAME, Context.MODE_PRIVATE);
                        // Getting editor
                        SharedPreferences.Editor editor = preferences.edit();
                        // Putting the value false for loggedin
                        editor.putBoolean(Config.LOGGEDIN_SHARED_PREF, false);
                        // Putting blank value to email
                        editor.putString(Config.EMAIL_SHARED_PREF, "");
                        // Saving the sharedpreferences
                        editor.commit();
                        // Starting login activity
                        Intent intent = new Intent(MainActivity.this, LoginActivity.class);
                        startActivity(intent);
                    }
                });
        alertDialogBuilder.setNegativeButton("No",
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface arg0, int arg1) {
                    }
                });
        // Showing the alert dialog
        android.support.v7.app.AlertDialog alertDialog = alertDialogBuilder.create();
        alertDialog.show();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        if (id == R.id.menuLogout) {
            logout();
        }
        return super.onOptionsItemSelected(item);
    }
}
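Not a definitive answer, but since mp4parser and plain ffmpeg didn't work out, a sketch of the FFmpeg route may help. It assumes the FFmpegKit Android library; trimVideo and both path parameters are hypothetical names. Stream copy (-c copy) trims without re-encoding, so it is fast but cuts on keyframes:

import java.util.Locale;

import android.util.Log;

import com.arthenica.ffmpegkit.FFmpegKit;
import com.arthenica.ffmpegkit.FFmpegSession;
import com.arthenica.ffmpegkit.ReturnCode;

// Hypothetical helper: copy the segment [startSec, startSec + durationSec]
// of inPath into outPath without re-encoding.
private void trimVideo(String inPath, String outPath, int startSec, int durationSec) {
    String cmd = String.format(Locale.US,
            "-y -ss %d -i \"%s\" -t %d -c copy \"%s\"",
            startSec, inPath, durationSec, outPath);
    FFmpegSession session = FFmpegKit.execute(cmd);
    if (ReturnCode.isSuccess(session.getReturnCode())) {
        Log.d(TAG, "Trimmed video saved to " + outPath);
    } else {
        Log.e(TAG, "Trim failed: " + session.getFailStackTrace());
    }
}

Wired into onActivityResult in place of the commented-out videotrim() call, it would trim the freshly recorded file before launchUploadActivity(false) starts.
-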
OpenAI Whisper: FileNotFoundError: [WinError 2] The system cannot find the file specified
16 December 2023, by Alex — I'm not the first to get this error; however, all previous answers relate to ffmpeg not being installed. As you can see, I installed ffmpeg, so I'm not sure why I still get this error. Moreover, this code works perfectly fine in Google Colab but not in Spyder.


pip install tiktoken

pip install -U openai-whisper

pip install ffmpeg

pip install setuptools-rust

import whisper

model = whisper.load_model("medium")

result = model.transcribe("F:/iim/dahl/01.Projects/20231215_Schlueter/02.Code/01.Data/The Investors Everybody Ignored ft Sallie Krawcheck Whats Your Problem Jacob Goldstein.mp3")
print(result["text"])



 Cell In[13], line 1
 result = model.transcribe("F:/iim/dahl/01.Projects/20231215_Schlueter/02.Code/01.Data/The Investors Everybody Ignored ft Sallie Krawcheck Whats Your Problem Jacob Goldstein.mp3/")

 File F:\iim\dahl\00.Anaconda\Lib\site-packages\whisper\transcribe.py:122 in transcribe
 mel = log_mel_spectrogram(audio, model.dims.n_mels, padding=N_SAMPLES)

 File F:\iim\dahl\00.Anaconda\Lib\site-packages\whisper\audio.py:140 in log_mel_spectrogram
 audio = load_audio(audio)

 File F:\iim\dahl\00.Anaconda\Lib\site-packages\whisper\audio.py:58 in load_audio
 out = run(cmd, capture_output=True, check=True).stdout

 File F:\iim\dahl\00.Anaconda\Lib\subprocess.py:548 in run
 with Popen(*popenargs, **kwargs) as process:

 File F:\iim\dahl\00.Anaconda\Lib\site-packages\spyder_kernels\customize\spydercustomize.py:109 in __init__
 super(SubprocessPopen, self).__init__(*args, **kwargs)

 File F:\iim\dahl\00.Anaconda\Lib\subprocess.py:1026 in __init__
 self._execute_child(args, executable, preexec_fn, close_fds,

 File F:\iim\dahl\00.Anaconda\Lib\subprocess.py:1538 in _execute_child
 hp, ht, pid, tid = _winapi.CreateProcess(executable, args,

FileNotFoundError: [WinError 2] Das System kann die angegebene Datei nicht finden
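
A plausible explanation, offered as a guess rather than a confirmed diagnosis: pip install ffmpeg installs an unrelated PyPI package, not the ffmpeg executable, while Whisper's load_audio (visible in the traceback) runs that executable as a subprocess. Colab images ship with the binary preinstalled, which would explain why the same code works there. A quick check from inside Spyder:

import shutil

# Whisper shells out to the ffmpeg *binary*, so it must be on PATH.
ffmpeg_path = shutil.which("ffmpeg")
if ffmpeg_path is None:
    # e.g. `conda install -c conda-forge ffmpeg`, then restart Spyder
    print("ffmpeg binary not found on PATH")
else:
    print("ffmpeg found at", ffmpeg_path)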