
Recherche avancée
Médias (91)
-
Spoon - Revenge !
15 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
My Morning Jacket - One Big Holiday
15 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
Zap Mama - Wadidyusay ?
15 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
David Byrne - My Fair Lady
15 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
Beastie Boys - Now Get Busy
15 septembre 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Audio
-
Granite de l’Aber Ildut
9 septembre 2011, par
Mis à jour : Septembre 2011
Langue : français
Type : Texte
Autres articles (74)
-
MediaSPIP 0.1 Beta version
25 avril 2011, par
MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all-software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...) -
MediaSPIP version 0.1 Beta
16 avril 2011, par
MediaSPIP 0.1 beta est la première version de MediaSPIP décrétée comme "utilisable".
Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
Pour avoir une installation fonctionnelle, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...) -
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par
Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond.
Sur d’autres sites (12638)
-
How to queue ffmpeg jobs for transcoding ?
25 juillet 2019, par sujit patel
The script below checks an FTP server for any media file and starts transcoding using ffmpeg. The problem is that it starts many ffmpeg processes simultaneously. With so many ffmpeg processes running, the server becomes too slow and takes a very long time to transcode videos. Sometimes the server stops working. How can I put the jobs in a queue?
#!/usr/bin/env python3
import os, sys, time, threading, subprocess
import logging
from config import MEDIA_SERVER, MEDIA_SERVER_USERNAME, MEDIA_DIRS, LOCAL_MEDIA_DIR_ROOT, TRANSCODING_SERVER, TRANSCODING_SERVER_USERNAME, RSYNC_SERVER, RSYNC_USERNAME, RSYNC_DIR, PROCESSING_DIR, PROCESSING_GPU_SCRIPT, PROCESSING_CPU_SCRIPT, EMAIL_SEND_TO, EMAIL_SEND_FROM
from send_email import sendEmail
import sqlite3
logger = logging.getLogger(__name__)
class FuncThread(threading.Thread):
    """Thread that calls *target* with the positional *args* captured at
    construction time when the thread is started."""

    def __init__(self, target, *args):
        threading.Thread.__init__(self)
        self._targett = target
        self._argst = args

    def run(self):
        # Invoke the stored callable with the stored arguments.
        self._targett(*self._argst)
class Automator(threading.Thread):
    """Daemon thread that polls for new SMIL and media files, launches
    transcoding jobs in worker threads, and persists the last-processed
    file timestamps in a local SQLite database ('automator.db').

    Fixes vs. the posted version: removed the "enter code here" paste
    artifact that made ``runTranscoder`` a syntax error, parameterized the
    SQL statements, and replaced print/error logging with
    ``logger.exception`` so tracebacks are recorded.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.sleepTime = 60  # seconds between polling cycles
        # "last..." holds the timestamp committed by the previous cycle;
        # "new..." accumulates the max mtime seen during the current cycle.
        self.lastSMILFileCheckTime = 0
        self.newlastSMILFileCheckTime = 0
        self.lastMediaFileCheckTime = 0
        self.newLastMediaFileCheckTime = 0
        self.db = None  # sqlite3 connection, opened in setupDB()
        self.fluid_threads = []  # in-flight transcoding worker threads
        self.scriptRoot = os.path.dirname(os.path.realpath(__file__))

    def fluid_thread_add(self, thd):
        """Register and start one worker thread."""
        self.fluid_threads.append(thd)
        thd.start()

    def processFluidThreads(self):
        """Join finished worker threads and keep only the live ones."""
        alive = [t.is_alive() for t in self.fluid_threads]
        finished = [t for t, live in zip(self.fluid_threads, alive) if not live]
        self.fluid_threads = [t for t, live in zip(self.fluid_threads, alive) if live]
        if finished:
            logger.info('Fluid threads finished: %s (joining on them now...)', len(finished))
            for t in finished:
                t.join()
            logger.info('Joined finished threads successfully')
        if self.fluid_threads:
            logger.info('Fluid threads remaining: %s', len(self.fluid_threads))

    def run(self):
        """Main loop: restore persisted state, then poll forever."""
        self.setupDB()
        self.fetchlastCheckTime()
        while True:
            self.process()
            time.sleep(self.sleepTime)

    def process(self):
        """Run one polling cycle; any error is logged and the loop survives."""
        logger.debug("process")
        try:
            self.handleNewSMILFiles()
            self.rsyncFromRemote()
            self.handleNewSourceFiles()
            self.processFluidThreads()
            self.updatelastCheckTimes()
        except Exception:
            # Keep the daemon alive; log the full traceback instead of print().
            logger.exception("Something went wrong while running this")

    def handleNewSourceFiles(self):
        """Scan PROCESSING_DIR for '*complete' marker files newer than the
        last check and start one transcoding thread per new marker."""
        logger.info("Looking for medial files since " + str(self.lastMediaFileCheckTime))
        for root, dirs, filenames in os.walk(PROCESSING_DIR):
            for f in filenames:
                if not f.lower().endswith("complete"):
                    continue
                file_path = os.path.join(root, f)
                mod_time = self.modification_date(file_path)
                if mod_time <= self.lastMediaFileCheckTime:
                    continue
                logger.info("Found a new media File " + file_path)
                relDir = os.path.relpath(root, PROCESSING_DIR)
                f_name = root.split("/")[-1]
                new_output_localdir = os.path.join(LOCAL_MEDIA_DIR_ROOT, relDir)
                # Output goes to the parent of the mirrored directory.
                new_output_localdir = os.path.abspath(os.path.join(new_output_localdir, os.pardir))
                new_output_remotedir = new_output_localdir
                if new_output_remotedir.startswith(LOCAL_MEDIA_DIR_ROOT):
                    new_output_remotedir = new_output_remotedir[len(LOCAL_MEDIA_DIR_ROOT):]
                if self.startATranscodingThread(root, new_output_localdir, new_output_remotedir, f_name + ".mp4"):
                    if mod_time > self.newLastMediaFileCheckTime:
                        self.newLastMediaFileCheckTime = mod_time

    def startATranscodingThread(self, inputFile, outputLocalDIR, outputRemoteDIR, fileName):
        """Queue a worker thread that transcodes one input. Returns True."""
        self.fluid_thread_add(FuncThread(self.runTranscoder, inputFile, outputLocalDIR, outputRemoteDIR, fileName, MEDIA_SERVER))
        return True

    def handleNewSMILFiles(self):
        """Scan LOCAL_MEDIA_DIR_ROOT for '*stream.smil' files newer than
        the last check and create the matching remote output directory."""
        if MEDIA_SERVER != TRANSCODING_SERVER:
            logger.info("Media server is separate, fetching last 24 hours SMIL files from " + MEDIA_SERVER)
            self.rsyncSMILFiles()
        logger.info("Looking for SMIL files since " + str(self.lastSMILFileCheckTime) + " in " + LOCAL_MEDIA_DIR_ROOT)
        for root, dirs, filenames in os.walk(LOCAL_MEDIA_DIR_ROOT):
            for f in filenames:
                if not f.lower().endswith("stream.smil"):
                    continue
                file_path = os.path.join(root, f)
                mod_time = self.modification_date(file_path)
                if mod_time <= self.lastSMILFileCheckTime:
                    continue
                logger.info("Found a new SMIL File " + file_path)
                relDir = os.path.relpath(root, LOCAL_MEDIA_DIR_ROOT)
                # "foo.stream.smil" -> remote directory name "foo".
                base = f.split(".")[0]
                new_dir_name = os.path.join(relDir, os.path.splitext(os.path.basename(base))[0])
                if self.createARemoteDirectory(new_dir_name):
                    if mod_time > self.newlastSMILFileCheckTime:
                        self.newlastSMILFileCheckTime = mod_time

    def modification_date(self, filename):
        """Return the file's mtime in seconds since the epoch (float)."""
        return os.path.getmtime(filename)

    def createARemoteDirectory(self, dirName):
        """``mkdir -p`` *dirName* under RSYNC_DIR on the rsync host.

        Returns True. Raises CalledProcessError if ssh/mkdir fails.
        NOTE(review): the path is interpolated into a shell command; names
        containing spaces or shell metacharacters will break or be unsafe.
        """
        COMMAND = "ssh {}@{} mkdir -p {}".format(RSYNC_USERNAME, RSYNC_SERVER, RSYNC_DIR + "/" + dirName)
        logger.info("Going to execute :-- " + COMMAND)
        subprocess.check_call(COMMAND, shell=True)
        return True

    def rsyncSMILFiles(self):
        """Pull SMIL files modified within 3 days from each media directory
        on the media server into LOCAL_MEDIA_DIR_ROOT. Always returns True;
        connection failures are logged per directory."""
        for MEDIA_DIR in MEDIA_DIRS:
            TEMP_FILE = "/tmp/rsync_files.{}".format(int(time.time()))
            COMMAND = "ssh -o ConnectTimeout=10 {}@{} \"cd {} && find . -mtime -3 -name *.stream.smil > {} && rsync -azP --files-from={} . {}@{}:{}/{}\"".format(MEDIA_SERVER_USERNAME, MEDIA_SERVER, MEDIA_DIR, TEMP_FILE, TEMP_FILE, TRANSCODING_SERVER_USERNAME, TRANSCODING_SERVER, LOCAL_MEDIA_DIR_ROOT, MEDIA_DIR)
            logger.info("Going to execute :-- " + COMMAND)
            try:
                subprocess.check_call(COMMAND, shell=True)
            except Exception:
                logger.exception("Unable to connect to media server")
        return True

    def rsyncFromRemote(self):
        """Mirror the rsync staging directory into PROCESSING_DIR.

        Returns True. Raises CalledProcessError if rsync fails.
        """
        COMMAND = "rsync -azP --delete {}@{}:{} {} ".format(RSYNC_USERNAME, RSYNC_SERVER, RSYNC_DIR + "/", PROCESSING_DIR)
        logger.info("Going to execute :-- " + COMMAND)
        subprocess.check_call(COMMAND, shell=True)
        return True

    def runTranscoder(self, inputDIR, outputLocalDIR, outputRemoteDIR, fileName, media_server):
        """Run the CPU transcoding script for one input directory.

        Failures are logged (not raised) so the worker thread exits cleanly.
        Always returns True.
        """
        COMMAND = "bash {} {} {} {} {} {}".format(PROCESSING_CPU_SCRIPT, inputDIR, outputLocalDIR, outputRemoteDIR, fileName, media_server)
        logger.info("Going to execute :-- " + COMMAND)
        try:
            subprocess.check_call(COMMAND, shell=True)
        except Exception:
            logger.exception("Transcoding Failed for a file :- " + outputRemoteDIR + fileName)
        return True

    def setupDB(self):
        """Open (or create) automator.db and ensure both state tables exist."""
        self.db = sqlite3.connect('automator.db', detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
        self.db.execute("create table if not exists last_smil_check (last_check_time int)")
        self.db.execute("create table if not exists last_mediafile_check(last_check_time int)")
        self.db.commit()

    def fetchlastCheckTime(self):
        """Load the persisted last-check timestamps from the database."""
        for row in self.db.execute("select * from last_smil_check"):
            logger.info(row)
            self.lastSMILFileCheckTime = row[0]
        self.newlastSMILFileCheckTime = self.lastSMILFileCheckTime
        for row in self.db.execute("select * from last_mediafile_check"):
            logger.info(row)
            self.lastMediaFileCheckTime = row[0]
        self.newLastMediaFileCheckTime = self.lastMediaFileCheckTime

    def updatelastCheckTimes(self):
        """Commit the timestamps observed this cycle to the database."""
        self.lastSMILFileCheckTime = self.newlastSMILFileCheckTime
        self.lastMediaFileCheckTime = self.newLastMediaFileCheckTime
        self._upsert("last_smil_check", self.lastSMILFileCheckTime)
        self._upsert("last_mediafile_check", self.lastMediaFileCheckTime)

    def _upsert(self, table, value):
        """Insert the single state row if *table* is empty, else update it.

        *table* is an internal constant (never user input), so formatting it
        into the statement is safe; the value itself is parameterized.
        """
        (count,) = self.db.execute("select count(*) from {}".format(table)).fetchone()
        if count == 0:
            self.db.execute("insert into {} values (?)".format(table), (value,))
        else:
            self.db.execute("update {} set last_check_time = ?".format(table), (value,))
        self.db.commit()
if __name__ == '__main__':
    # Run as a daemon script: verbose logging, then poll forever.
    logging.basicConfig(level=logging.DEBUG)
    automator = Automator()
    automator.start()
-
Your introduction to personally identifiable information : What is PII ?
-
What is PII ? Your introduction to personally identifiable information