diff --git a/airtime_mvc/application/models/RabbitMq.php b/airtime_mvc/application/models/RabbitMq.php
index 98ed388f1..bef33c623 100644
--- a/airtime_mvc/application/models/RabbitMq.php
+++ b/airtime_mvc/application/models/RabbitMq.php
@@ -54,7 +54,7 @@ class Application_Model_RabbitMq
{
$md["event_type"] = $event_type;
- $exchange = 'airtime-media-monitor';
+ $exchange = 'airtime-analyzer';
$data = json_encode($md);
self::sendMessage($exchange, 'direct', true, $data);
}
diff --git a/airtime_mvc/application/models/Systemstatus.php b/airtime_mvc/application/models/Systemstatus.php
index dcb5c1dac..e417a2745 100644
--- a/airtime_mvc/application/models/Systemstatus.php
+++ b/airtime_mvc/application/models/Systemstatus.php
@@ -184,7 +184,7 @@ class Application_Model_Systemstatus
$ip = $component->getDbIp();
$docRoot = self::GetMonitStatus($ip);
- $data = self::ExtractServiceInformation($docRoot, "airtime-media-monitor");
+ $data = self::ExtractServiceInformation($docRoot, "airtime-analyzer");
return $data;
}
diff --git a/airtime_mvc/application/views/scripts/systemstatus/index.phtml b/airtime_mvc/application/views/scripts/systemstatus/index.phtml
index 9fa27d378..fb16caf29 100644
--- a/airtime_mvc/application/views/scripts/systemstatus/index.phtml
+++ b/airtime_mvc/application/views/scripts/systemstatus/index.phtml
@@ -82,7 +82,7 @@
Make sure RabbitMQ is installed correctly, and that your settings in /etc/airtime/airtime.conf
are correct. Try using sudo rabbitmqctl list_users and sudo rabbitmqctl list_vhosts
to see if the airtime user (or your custom RabbitMQ user) exists, then checking that
- sudo rabbitmqctl list_exchanges contains entries for airtime-media-monitor, airtime-pypo,
+ sudo rabbitmqctl list_exchanges contains entries for airtime-analyzer, airtime-pypo,
and airtime-uploads.
= 3.2-14) to ensure that this file is present
-# and status_of_proc is working.
-. /lib/lsb/init-functions
-
-start () {
- start-stop-daemon --start --background --quiet --chuid $USERID:$GROUPID \
- --make-pidfile --pidfile $PIDFILE --startas $DAEMON
-}
-
-stop () {
- # Send TERM after 5 seconds, wait at most 30 seconds.
- start-stop-daemon --stop --oknodo --retry TERM/5/0/30 --quiet --pidfile $PIDFILE
- rm -f $PIDFILE
-}
-
-case "${1:-''}" in
- 'start')
- # start commands here
- echo -n "Starting $NAME: "
- start
- echo "Done."
- ;;
- 'stop')
- # stop commands here
- echo -n "Stopping $NAME: "
- stop
- echo "Done."
- ;;
- 'restart')
- # restart commands here
- echo -n "Restarting $NAME: "
- stop
- start
- echo "Done."
- ;;
- 'force-reload')
- # reload commands here
- echo -n "Reloading $NAME: "
- stop
- start
- echo "Done."
- ;;
- 'status')
- status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
- ;;
- *) # no parameter specified
- echo "Usage: $SELF start|stop|restart|status"
- exit 1
- ;;
-esac
diff --git a/python_apps/media-monitor/install/upstart/airtime-media-monitor.conf.template b/python_apps/media-monitor/install/upstart/airtime-media-monitor.conf.template
deleted file mode 100644
index 0afe7ebc8..000000000
--- a/python_apps/media-monitor/install/upstart/airtime-media-monitor.conf.template
+++ /dev/null
@@ -1,15 +0,0 @@
-description "Airtime Media Monitor"
-author "help@sourcefabric.org"
-
-start on runlevel [2345]
-stop on runlevel [!2345]
-
-respawn
-
-setuid WEB_USER
-setgid WEB_USER
-
-env LANG='en_US.UTF-8'
-env LC_ALL='en_US.UTF-8'
-
-exec airtime-media-monitor
\ No newline at end of file
diff --git a/python_apps/media-monitor/media_monitor/__init__.py b/python_apps/media-monitor/media_monitor/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/python_apps/media-monitor/media_monitor/__main__.py b/python_apps/media-monitor/media_monitor/__main__.py
deleted file mode 100644
index 057eef53f..000000000
--- a/python_apps/media-monitor/media_monitor/__main__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import logging
-import locale
-import time
-import sys
-import os
-import mm2.mm2 as mm2
-from std_err_override import LogWriter
-locale.setlocale(locale.LC_ALL, '')
-
-def run():
- global_cfg = '/etc/airtime/airtime.conf'
- logging_cfg = os.path.join(os.path.dirname(__file__), 'logging.cfg')
-
- mm2.main( global_cfg, logging_cfg )
-
-run()
\ No newline at end of file
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/__init__.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemediamonitorbootstrap.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemediamonitorbootstrap.py
deleted file mode 100644
index 02b6cf2cf..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemediamonitorbootstrap.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import time
-import pyinotify
-import shutil
-
-class AirtimeMediaMonitorBootstrap():
-
- """AirtimeMediaMonitorBootstrap constructor
-
- Keyword Arguments:
- logger -- reference to the media-monitor logging facility
- pe -- reference to an instance of ProcessEvent
- api_clients -- reference of api_clients to communicate with airtime-server
- """
- def __init__(self, logger, pe, api_client, mmc, wm, config):
- self.logger = logger
- self.pe = pe
- self.api_client = api_client
- self.mmc = mmc
- self.wm = wm
- self.config = config
- # add /etc on watch list so we can detect mount
- self.mount_file = "/etc"
- self.curr_mtab_file = "/var/tmp/airtime/media-monitor/currMtab"
- self.logger.info("Adding %s on watch list...", self.mount_file)
- self.wm.add_watch(self.mount_file, pyinotify.ALL_EVENTS, rec=False, auto_add=False)
-
- tmp_dir = os.path.dirname(self.curr_mtab_file)
- if not os.path.exists(tmp_dir):
- os.makedirs(tmp_dir)
-
- # create currMtab file if it's the first time
- if not os.path.exists(self.curr_mtab_file):
- shutil.copy('/etc/mtab', self.curr_mtab_file)
-
- """On bootup we want to scan all directories and look for files that
- weren't there or files that changed before media-monitor process
- went offline.
- """
- def scan(self):
- directories = self.get_list_of_watched_dirs()
- self.logger.info("watched directories found: %s", directories)
-
- for id, dir in directories.iteritems():
- self.logger.debug("%s, %s", id, dir)
- self.sync_database_to_filesystem(id, dir)
-
- """Gets a list of files that the Airtime database knows for a specific directory.
- You need to provide the directory's row ID, which is obtained when calling
- get_list_of_watched_dirs function.
- dir_id -- row id of the directory in the cc_watched_dirs database table
- """
- def list_db_files(self, dir_id):
- return self.api_client.list_all_db_files(dir_id)
-
- """
- returns the path and its corresponding database row idfor all watched directories. Also
- returns the Stor directory, which can be identified by its row id (always has value of "1")
-
- Return type is a dictionary similar to:
- {"1":"/srv/airtime/stor/"}
- """
- def get_list_of_watched_dirs(self):
- json = self.api_client.list_all_watched_dirs()
-
- try:
- return json["dirs"]
- except KeyError as e:
- self.logger.error("Could not find index 'dirs' in dictionary: %s", str(json))
- self.logger.error(str(e))
- return {}
-
- """
- This function takes in a path name provided by the database (and its corresponding row id)
- and reads the list of files in the local file system. Its purpose is to discover which files
- exist on the file system but not in the database and vice versa, as well as which files have
- been modified since the database was last updated. In each case, this method will call an
- appropiate method to ensure that the database actually represents the filesystem.
- dir_id -- row id of the directory in the cc_watched_dirs database table
- dir -- pathname of the directory
- """
- def sync_database_to_filesystem(self, dir_id, dir):
- # TODO: is this line even necessary?
- dir = os.path.normpath(dir)+"/"
- """
- set to hold new and/or modified files. We use a set to make it ok if files are added
- twice. This is because some of the tests for new files return result sets that are not
- mutually exclusive from each other.
- """
- removed_files = set()
-
-
- db_known_files_set = set()
- files = self.list_db_files(dir_id)
-
- for f in files:
- db_known_files_set.add(f)
-
- all_files = self.mmc.clean_dirty_file_paths( self.mmc.scan_dir_for_new_files(dir) )
-
- all_files_set = set()
- for file_path in all_files:
- if self.config.problem_directory not in file_path:
- all_files_set.add(file_path[len(dir):])
-
- # if dir doesn't exists, update db
- if not os.path.exists(dir):
- self.pe.handle_stdout_files(dir)
-
- if os.path.exists(self.mmc.timestamp_file):
- """find files that have been modified since the last time media-monitor process started."""
- time_diff_sec = time.time() - os.path.getmtime(self.mmc.timestamp_file)
- command = self.mmc.find_command(directory=dir, extra_arguments=("-type f -readable -mmin -%d" % (time_diff_sec/60+1)))
- else:
- command = self.mmc.find_command(directory=dir, extra_arguments="-type f -readable")
-
- self.logger.debug(command)
- stdout = self.mmc.exec_command(command)
-
- if stdout is None:
- new_files = []
- else:
- new_files = stdout.splitlines()
-
- new_and_modified_files = set()
- for file_path in new_files:
- if self.config.problem_directory not in file_path:
- new_and_modified_files.add(file_path[len(dir):])
-
- """
- new_and_modified_files gives us a set of files that were either copied or modified
- since the last time media-monitor was running. These files were collected based on
- their modified timestamp. But this is not all that has changed in the directory. Files
- could have been removed, or files could have been moved into this directory (moving does
- not affect last modified timestamp). Lets get a list of files that are on the file-system
- that the db has no record of, and vice-versa.
- """
- deleted_files_set = db_known_files_set - all_files_set
- new_files_set = all_files_set - db_known_files_set
- modified_files_set = new_and_modified_files - new_files_set
-
- self.logger.info(u"Deleted files: \n%s\n\n", deleted_files_set)
- self.logger.info(u"New files: \n%s\n\n", new_files_set)
- self.logger.info(u"Modified files: \n%s\n\n", modified_files_set)
-
- #"touch" file timestamp
- try:
- self.mmc.touch_index_file()
- except Exception, e:
- self.logger.warn(e)
-
- for file_path in deleted_files_set:
- self.logger.debug("deleted file")
- full_file_path = os.path.join(dir, file_path)
- self.logger.debug(full_file_path)
- self.pe.handle_removed_file(False, full_file_path)
-
-
- for file_set, debug_message, handle_attribute in [(new_files_set, "new file", "handle_created_file"),
- (modified_files_set, "modified file", "handle_modified_file")]:
- for file_path in file_set:
- self.logger.debug(debug_message)
- full_file_path = os.path.join(dir, file_path)
- self.logger.debug(full_file_path)
- if os.path.exists(full_file_path):
- getattr(self.pe,handle_attribute)(False,full_file_path, os.path.basename(full_file_path))
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemetadata.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemetadata.py
deleted file mode 100644
index 731f619f2..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimemetadata.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import replaygain
-
-import os
-import hashlib
-import mutagen
-import logging
-import math
-import traceback
-
-
-"""
-list of supported easy tags in mutagen version 1.20
-['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
-'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
-'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
-'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
-'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
-'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
-'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
-'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm', 'musicbrainz_trackid',
-'arranger', 'albumsort', 'replaygain_*_peak', 'organization']
-"""
-
-class AirtimeMetadata:
-
- def __init__(self):
-
- self.airtime2mutagen = {\
- "MDATA_KEY_TITLE": "title", \
- "MDATA_KEY_CREATOR": "artist", \
- "MDATA_KEY_SOURCE": "album", \
- "MDATA_KEY_GENRE": "genre", \
- "MDATA_KEY_MOOD": "mood", \
- "MDATA_KEY_TRACKNUMBER": "tracknumber", \
- "MDATA_KEY_BPM": "bpm", \
- "MDATA_KEY_LABEL": "organization", \
- "MDATA_KEY_COMPOSER": "composer", \
- "MDATA_KEY_ENCODER": "encodedby", \
- "MDATA_KEY_CONDUCTOR": "conductor", \
- "MDATA_KEY_YEAR": "date", \
- "MDATA_KEY_URL": "website", \
- "MDATA_KEY_ISRC": "isrc", \
- "MDATA_KEY_COPYRIGHT": "copyright", \
- }
-
- self.mutagen2airtime = {\
- "title": "MDATA_KEY_TITLE", \
- "artist": "MDATA_KEY_CREATOR", \
- "album": "MDATA_KEY_SOURCE", \
- "genre": "MDATA_KEY_GENRE", \
- "mood": "MDATA_KEY_MOOD", \
- "tracknumber": "MDATA_KEY_TRACKNUMBER", \
- "bpm": "MDATA_KEY_BPM", \
- "organization": "MDATA_KEY_LABEL", \
- "composer": "MDATA_KEY_COMPOSER", \
- "encodedby": "MDATA_KEY_ENCODER", \
- "conductor": "MDATA_KEY_CONDUCTOR", \
- "date": "MDATA_KEY_YEAR", \
- "website": "MDATA_KEY_URL", \
- "isrc": "MDATA_KEY_ISRC", \
- "copyright": "MDATA_KEY_COPYRIGHT", \
- }
-
- self.logger = logging.getLogger()
-
- def get_md5(self, filepath):
- """
- Returns an md5 of the file located at filepath. Returns an empty string
- if there was an error reading the file.
- """
- try:
- f = open(filepath, 'rb')
- m = hashlib.md5()
- m.update(f.read())
- md5 = m.hexdigest()
- except Exception, e:
- return ""
-
- return md5
-
- ## mutagen_length is in seconds with the format (d+).dd
- ## return format hh:mm:ss.uuu
- def format_length(self, mutagen_length):
- t = float(mutagen_length)
- h = int(math.floor(t / 3600))
- t = t % 3600
- m = int(math.floor(t / 60))
-
- s = t % 60
- # will be ss.uuu
- s = str(s)
- seconds = s.split(".")
- s = seconds[0]
-
- # have a maximum of 6 subseconds.
- if len(seconds[1]) >= 6:
- ss = seconds[1][0:6]
- else:
- ss = seconds[1][0:]
-
- length = "%s:%s:%s.%s" % (h, m, s, ss)
-
- return length
-
- def save_md_to_file(self, m):
- try:
- airtime_file = mutagen.File(m['MDATA_KEY_FILEPATH'], easy=True)
-
- for key in m:
- if key in self.airtime2mutagen:
- value = m[key]
-
- if value is not None:
- value = unicode(value)
- else:
- value = unicode('');
-
- #if len(value) > 0:
- self.logger.debug("Saving key '%s' with value '%s' to file", key, value)
- airtime_file[self.airtime2mutagen[key]] = value
-
- airtime_file.save()
- except Exception, e:
- self.logger.error('Trying to save md')
- self.logger.error('Exception: %s', e)
- self.logger.error('Filepath %s', m['MDATA_KEY_FILEPATH'])
-
- def truncate_to_length(self, item, length):
- if isinstance(item, int):
- item = str(item)
- if isinstance(item, basestring):
- if len(item) > length:
- return item[0:length]
- else:
- return item
-
- def get_md_from_file(self, filepath):
- """
- Returns None if error retrieving metadata. Otherwise returns a dictionary
- representing the file's metadata
- """
-
- self.logger.info("getting info from filepath %s", filepath)
-
- md = {}
-
- replay_gain_val = replaygain.calculate_replay_gain(filepath)
- self.logger.info('ReplayGain calculated as %s for %s' % (replay_gain_val, filepath))
- md['MDATA_KEY_REPLAYGAIN'] = replay_gain_val
-
- try:
-
- md5 = self.get_md5(filepath)
- md['MDATA_KEY_MD5'] = md5
-
- file_info = mutagen.File(filepath, easy=True)
- except Exception, e:
- self.logger.error("failed getting metadata from %s", filepath)
- self.logger.error("Exception %s", e)
- return None
-
-
- #check if file has any metadata
- if file_info is None:
- return None
-
- for key in file_info.keys() :
- if key in self.mutagen2airtime:
- val = file_info[key]
- try:
- if val is not None and len(val) > 0 and val[0] is not None and len(val[0]) > 0:
- md[self.mutagen2airtime[key]] = val[0]
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
- if 'MDATA_KEY_TITLE' not in md:
- #get rid of file extension from original name, name might have more than 1 '.' in it.
- original_name = os.path.basename(filepath)
- original_name = original_name.split(".")[0:-1]
- original_name = ''.join(original_name)
- md['MDATA_KEY_TITLE'] = original_name
-
- #incase track number is in format u'4/11'
- #need to also check that the tracknumber is even a tracknumber (cc-2582)
- if 'MDATA_KEY_TRACKNUMBER' in md:
- try:
- md['MDATA_KEY_TRACKNUMBER'] = int(md['MDATA_KEY_TRACKNUMBER'])
- except Exception, e:
- pass
-
- if isinstance(md['MDATA_KEY_TRACKNUMBER'], basestring):
- try:
- md['MDATA_KEY_TRACKNUMBER'] = int(md['MDATA_KEY_TRACKNUMBER'].split("/")[0], 10)
- except Exception, e:
- del md['MDATA_KEY_TRACKNUMBER']
-
- #make sure bpm is valid, need to check more types of formats for this tag to assure correct parsing.
- if 'MDATA_KEY_BPM' in md:
- if isinstance(md['MDATA_KEY_BPM'], basestring):
- try:
- md['MDATA_KEY_BPM'] = int(md['MDATA_KEY_BPM'])
- except Exception, e:
- del md['MDATA_KEY_BPM']
-
- #following metadata is truncated if needed to fit db requirements.
- if 'MDATA_KEY_GENRE' in md:
- md['MDATA_KEY_GENRE'] = self.truncate_to_length(md['MDATA_KEY_GENRE'], 64)
-
- if 'MDATA_KEY_TITLE' in md:
- md['MDATA_KEY_TITLE'] = self.truncate_to_length(md['MDATA_KEY_TITLE'], 512)
-
- if 'MDATA_KEY_CREATOR' in md:
- md['MDATA_KEY_CREATOR'] = self.truncate_to_length(md['MDATA_KEY_CREATOR'], 512)
-
- if 'MDATA_KEY_SOURCE' in md:
- md['MDATA_KEY_SOURCE'] = self.truncate_to_length(md['MDATA_KEY_SOURCE'], 512)
-
- if 'MDATA_KEY_MOOD' in md:
- md['MDATA_KEY_MOOD'] = self.truncate_to_length(md['MDATA_KEY_MOOD'], 64)
-
- if 'MDATA_KEY_LABEL' in md:
- md['MDATA_KEY_LABEL'] = self.truncate_to_length(md['MDATA_KEY_LABEL'], 512)
-
- if 'MDATA_KEY_COMPOSER' in md:
- md['MDATA_KEY_COMPOSER'] = self.truncate_to_length(md['MDATA_KEY_COMPOSER'], 512)
-
- if 'MDATA_KEY_ENCODER' in md:
- md['MDATA_KEY_ENCODER'] = self.truncate_to_length(md['MDATA_KEY_ENCODER'], 255)
-
- if 'MDATA_KEY_CONDUCTOR' in md:
- md['MDATA_KEY_CONDUCTOR'] = self.truncate_to_length(md['MDATA_KEY_CONDUCTOR'], 512)
-
- if 'MDATA_KEY_YEAR' in md:
- md['MDATA_KEY_YEAR'] = self.truncate_to_length(md['MDATA_KEY_YEAR'], 16)
-
- if 'MDATA_KEY_URL' in md:
- md['MDATA_KEY_URL'] = self.truncate_to_length(md['MDATA_KEY_URL'], 512)
-
- if 'MDATA_KEY_ISRC' in md:
- md['MDATA_KEY_ISRC'] = self.truncate_to_length(md['MDATA_KEY_ISRC'], 512)
-
- if 'MDATA_KEY_COPYRIGHT' in md:
- md['MDATA_KEY_COPYRIGHT'] = self.truncate_to_length(md['MDATA_KEY_COPYRIGHT'], 512)
- #end of db truncation checks.
-
- try:
- md['MDATA_KEY_BITRATE'] = getattr(file_info.info, "bitrate", 0)
- md['MDATA_KEY_SAMPLERATE'] = getattr(file_info.info, "sample_rate", 0)
-
- md['MDATA_KEY_DURATION'] = self.format_length(getattr(file_info.info, "length", 0.0))
-
- md['MDATA_KEY_MIME'] = ""
- if len(file_info.mime) > 0:
- md['MDATA_KEY_MIME'] = file_info.mime[0]
- except Exception as e:
- self.logger.warn(e)
-
- if "mp3" in md['MDATA_KEY_MIME']:
- md['MDATA_KEY_FTYPE'] = "audioclip"
- elif "vorbis" in md['MDATA_KEY_MIME']:
- md['MDATA_KEY_FTYPE'] = "audioclip"
- else:
- self.logger.error("File %s of mime type %s does not appear to be a valid vorbis or mp3 file." % (filepath, md['MDATA_KEY_MIME']))
- return None
-
- return md
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimenotifier.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimenotifier.py
deleted file mode 100644
index dc27a36ec..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimenotifier.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import json
-import time
-import os
-import logging
-import traceback
-
-# For RabbitMQ
-from kombu.connection import BrokerConnection
-from kombu.messaging import Exchange, Queue, Consumer
-
-import pyinotify
-from pyinotify import Notifier
-
-from airtimemetadata import AirtimeMetadata
-
-class AirtimeNotifier(Notifier):
-
- def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, threshold=0, timeout=None, airtime_config=None, api_client=None, bootstrap=None, mmc=None):
- Notifier.__init__(self, watch_manager, default_proc_fun, read_freq, threshold, timeout)
-
- self.logger = logging.getLogger()
- self.config = airtime_config
- self.api_client = api_client
- self.bootstrap = bootstrap
- self.md_manager = AirtimeMetadata()
- self.import_processes = {}
- self.watched_folders = []
- self.mmc = mmc
- self.wm = watch_manager
- self.mask = pyinotify.ALL_EVENTS
-
- while not self.init_rabbit_mq():
- self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
- time.sleep(5)
-
- def init_rabbit_mq(self):
- """
- This function will attempt to connect to RabbitMQ Server and if successful
- return 'True'. Returns 'False' otherwise.
- """
-
- self.logger.info("Initializing RabbitMQ stuff")
- try:
- schedule_exchange = Exchange("airtime-media-monitor", "direct", durable=True, auto_delete=True)
- schedule_queue = Queue("media-monitor", exchange=schedule_exchange, key="filesystem")
- self.connection = BrokerConnection(self.config.cfg["rabbitmq"]["rabbitmq_host"], self.config.cfg["rabbitmq"]["rabbitmq_user"], self.config.cfg["rabbitmq"]["rabbitmq_password"], self.config.cfg["rabbitmq"]["rabbitmq_vhost"])
- channel = self.connection.channel()
- consumer = Consumer(channel, schedule_queue)
- consumer.register_callback(self.handle_message)
- consumer.consume()
- except Exception, e:
- self.logger.error(e)
- return False
-
- return True
-
- def handle_message(self, body, message):
- """
- Messages received from RabbitMQ are handled here. These messages
- instruct media-monitor of events such as a new directory being watched,
- file metadata has been changed, or any other changes to the config of
- media-monitor via the web UI.
- """
- # ACK the message to take it off the queue
- message.ack()
-
- self.logger.info("Received md from RabbitMQ: " + body)
- m = json.loads(message.body)
-
- if m['event_type'] == "md_update":
- self.logger.info("AIRTIME NOTIFIER md update event")
- self.md_manager.save_md_to_file(m)
-
- elif m['event_type'] == "new_watch":
- self.logger.info("AIRTIME NOTIFIER add watched folder event " + m['directory'])
- self.walk_newly_watched_directory(m['directory'])
-
- self.watch_directory(m['directory'])
-
- elif m['event_type'] == "remove_watch":
- watched_directory = m['directory']
-
- mm = self.proc_fun()
- wd = mm.wm.get_wd(watched_directory)
- self.logger.info("Removing watch on: %s wd %s", watched_directory, wd)
- mm.wm.rm_watch(wd, rec=True)
-
- elif m['event_type'] == "rescan_watch":
- self.bootstrap.sync_database_to_filesystem(str(m['id']), m['directory'])
-
- elif m['event_type'] == "change_stor":
- storage_directory = self.config.storage_directory
- new_storage_directory = m['directory']
- new_storage_directory_id = str(m['dir_id'])
-
- mm = self.proc_fun()
-
- wd = mm.wm.get_wd(storage_directory)
- self.logger.info("Removing watch on: %s wd %s", storage_directory, wd)
- mm.wm.rm_watch(wd, rec=True)
-
- self.bootstrap.sync_database_to_filesystem(new_storage_directory_id, new_storage_directory)
-
- self.config.storage_directory = os.path.normpath(new_storage_directory)
- self.config.imported_directory = os.path.normpath(os.path.join(new_storage_directory, '/imported'))
- self.config.organize_directory = os.path.normpath(os.path.join(new_storage_directory, '/organize'))
-
- for directory in [self.config.storage_directory, self.config.imported_directory, self.config.organize_directory]:
- self.mmc.ensure_is_dir(directory)
- self.mmc.is_readable(directory, True)
-
- self.watch_directory(new_storage_directory)
- elif m['event_type'] == "file_delete":
- filepath = m['filepath']
-
- mm = self.proc_fun()
- self.logger.info("Adding file to ignore: %s ", filepath)
- mm.add_filepath_to_ignore(filepath)
-
- if m['delete']:
- self.logger.info("Deleting file: %s ", filepath)
- try:
- os.unlink(filepath)
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
-
-
- def update_airtime(self, event):
- """
- Update airtime with information about files discovered in our
- watched directories.
- event: a dict() object with the following attributes:
- -filepath
- -mode
- -data
- -is_recorded_show
- """
- try:
- self.logger.info("updating filepath: %s ", event['filepath'])
- filepath = event['filepath']
- mode = event['mode']
-
- md = {}
- md['MDATA_KEY_FILEPATH'] = os.path.normpath(filepath)
-
- if 'data' in event:
- file_md = event['data']
- md.update(file_md)
- else:
- file_md = None
-
- if (os.path.exists(filepath) and (mode == self.config.MODE_CREATE)):
- if file_md is None:
- mutagen = self.md_manager.get_md_from_file(filepath)
- if mutagen is None:
- return
- md.update(mutagen)
-
- if 'is_recorded_show' in event and event['is_recorded_show']:
- self.api_client.update_media_metadata(md, mode, True)
- else:
- self.api_client.update_media_metadata(md, mode)
-
- elif (os.path.exists(filepath) and (mode == self.config.MODE_MODIFY)):
- mutagen = self.md_manager.get_md_from_file(filepath)
- if mutagen is None:
- return
- md.update(mutagen)
- if 'is_recorded_show' in event and event['is_recorded_show']:
- self.api_client.update_media_metadata(md, mode, True)
- else:
- self.api_client.update_media_metadata(md, mode)
- elif (mode == self.config.MODE_MOVED):
- md['MDATA_KEY_MD5'] = self.md_manager.get_md5(filepath)
- if 'is_recorded_show' in event and event['is_recorded_show']:
- self.api_client.update_media_metadata(md, mode, True)
- else:
- self.api_client.update_media_metadata(md, mode)
- elif (mode == self.config.MODE_DELETE):
- self.api_client.update_media_metadata(md, mode)
-
- elif (mode == self.config.MODE_DELETE_DIR):
- self.api_client.update_media_metadata(md, mode)
-
- except Exception, e:
- self.logger.error("failed updating filepath: %s ", event['filepath'])
- self.logger.error('Exception: %s', e)
- self.logger.error('Traceback: %s', traceback.format_exc())
-
- #define which directories the pyinotify WatchManager should watch.
- def watch_directory(self, directory):
- return self.wm.add_watch(directory, self.mask, rec=True, auto_add=True)
-
- def walk_newly_watched_directory(self, directory):
-
- mm = self.proc_fun()
-
- self.mmc.is_readable(directory, True)
- for (path, dirs, files) in os.walk(directory):
- for filename in files:
- full_filepath = os.path.join(path, filename)
-
- if self.mmc.is_audio_file(full_filepath):
- if self.mmc.is_readable(full_filepath, False):
- self.logger.info("importing %s", full_filepath)
- event = {'filepath': full_filepath, 'mode': self.config.MODE_CREATE, 'is_recorded_show': False}
- mm.multi_queue.put(event)
- else:
- self.logger.warn("file '%s' has does not have sufficient read permissions. Ignoring.", full_filepath)
-
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimeprocessevent.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimeprocessevent.py
deleted file mode 100644
index 90b8a0151..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/airtimeprocessevent.py
+++ /dev/null
@@ -1,431 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import socket
-import logging
-import time
-import os
-import shutil
-import difflib
-import traceback
-from subprocess import Popen, PIPE
-
-from pyinotify import ProcessEvent
-
-from airtimemetadata import AirtimeMetadata
-from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
-
-from api_clients import api_client
-
-class AirtimeProcessEvent(ProcessEvent):
-
- #TODO
- def my_init(self, queue, airtime_config=None, wm=None, mmc=None, api_client=api_client):
- """
- Method automatically called from ProcessEvent.__init__(). Additional
- keyworded arguments passed to ProcessEvent.__init__() are then
- delegated to my_init().
- """
- self.logger = logging.getLogger()
- self.config = airtime_config
-
- #put the file path into this dict if we want to ignore certain
- #events. For example, when deleting a file from the web ui, we
- #are going to delete it from the db on the server side, so media-monitor
- #doesn't need to contact the server and tell it to delete again.
- self.ignore_event = set()
-
- self.temp_files = {}
- self.cookies_IN_MOVED_FROM = {}
- self.file_events = []
- self.multi_queue = queue
- self.wm = wm
- self.md_manager = AirtimeMetadata()
- self.mmc = mmc
- self.api_client = api_client
- self.create_dict = {}
- self.mount_file_dir = "/etc";
- self.mount_file = "/etc/mtab";
- self.curr_mtab_file = "/var/tmp/airtime/media-monitor/currMtab"
- self.prev_mtab_file = "/var/tmp/airtime/media-monitor/prevMtab"
-
- def add_filepath_to_ignore(self, filepath):
- self.ignore_event.add(filepath)
-
- def process_IN_MOVE_SELF(self, event):
- self.logger.info("event: %s", event)
- path = event.path
- if event.dir:
- if "-unknown-path" in path:
- unknown_path = path
- pos = path.find("-unknown-path")
- path = path[0:pos] + "/"
-
- list = self.api_client.list_all_watched_dirs()
- # case where the dir that is being watched is moved to somewhere
- if path in list[u'dirs'].values():
- self.logger.info("Requesting the airtime server to remove '%s'", path)
- res = self.api_client.remove_watched_dir(path)
- if(res is None):
- self.logger.info("Unable to connect to the Airtime server.")
- # sucess
- if(res['msg']['code'] == 0):
- self.logger.info("%s removed from watch folder list successfully.", path)
- else:
- self.logger.info("Removing the watch folder failed: %s", res['msg']['error'])
- else:
- # subdir being moved
- # in this case, it has to remove watch manualy and also have to manually delete all records
- # on cc_files table
- wd = self.wm.get_wd(unknown_path)
- self.logger.info("Removing watch on: %s wd %s", unknown_path, wd)
- self.wm.rm_watch(wd, rec=True)
- self.file_events.append({'mode': self.config.MODE_DELETE_DIR, 'filepath': path})
-
-
- def process_IN_DELETE_SELF(self, event):
-
- #we only care about files that have been moved away from imported/ or organize/ dir
- if event.path in self.config.problem_directory or event.path in self.config.organize_directory:
- return
-
- self.logger.info("event: %s", event)
- path = event.path + '/'
- if event.dir:
- list = self.api_client.list_all_watched_dirs()
- if path in list[u'dirs'].values():
- self.logger.info("Requesting the airtime server to remove '%s'", path)
- res = self.api_client.remove_watched_dir(path)
- if(res is None):
- self.logger.info("Unable to connect to the Airtime server.")
- # sucess
- if(res['msg']['code'] == 0):
- self.logger.info("%s removed from watch folder list successfully.", path)
- else:
- self.logger.info("Removing the watch folder failed: %s", res['msg']['error'])
-
- def process_IN_CREATE(self, event):
- if event.path in self.mount_file_dir:
- return
- self.logger.info("event: %s", event)
- if not event.dir:
- # record the timestamp of the time on IN_CREATE event
- self.create_dict[event.pathname] = time.time()
-
- #event.dir: True if the event was raised against a directory.
- #event.name: filename
- #event.pathname: pathname (str): Concatenation of 'path' and 'name'.
- # we used to use IN_CREATE event, but the IN_CREATE event gets fired before the
- # copy was done. Hence, IN_CLOSE_WRITE is the correct one to handle.
- def process_IN_CLOSE_WRITE(self, event):
- if event.path in self.mount_file_dir:
- return
- self.logger.info("event: %s", event)
- self.logger.info("create_dict: %s", self.create_dict)
-
- try:
- del self.create_dict[event.pathname]
- self.handle_created_file(event.dir, event.pathname, event.name)
- except KeyError, e:
- pass
- #self.logger.warn("%s does not exist in create_dict", event.pathname)
- #Uncomment the above warning when we fix CC-3830 for 2.1.1
-
-
- def handle_created_file(self, dir, pathname, name):
- if not dir:
- self.logger.debug("PROCESS_IN_CLOSE_WRITE: %s, name: %s, pathname: %s ", dir, name, pathname)
-
- if self.mmc.is_temp_file(name) :
- #file created is a tmp file which will be modified and then moved back to the original filename.
- #Easy Tag creates this when changing metadata of ogg files.
- self.temp_files[pathname] = None
- #file is being overwritten/replaced in GUI.
- elif "goutputstream" in pathname:
- self.temp_files[pathname] = None
- elif self.mmc.is_audio_file(name):
- if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
-
- #file was created in /srv/airtime/stor/organize. Need to process and move
- #to /srv/airtime/stor/imported
- file_md = self.md_manager.get_md_from_file(pathname)
- playable = self.mmc.test_file_playability(pathname)
-
- if file_md and playable:
- self.mmc.organize_new_file(pathname, file_md)
- else:
- #move to problem_files
- self.mmc.move_to_problem_dir(pathname)
-
- else:
- # only append to self.file_events if the file isn't going to be altered by organize_new_file(). If file is going
- # to be altered by organize_new_file(), then process_IN_MOVED_TO event will handle appending it to self.file_events
- is_recorded = self.mmc.is_parent_directory(pathname, self.config.recorded_directory)
- self.file_events.append({'mode': self.config.MODE_CREATE, 'filepath': pathname, 'is_recorded_show': is_recorded})
-
-
- def process_IN_MODIFY(self, event):
- # if IN_MODIFY is followed by IN_CREATE, it's not true modify event
- if not event.pathname in self.create_dict:
- self.logger.info("process_IN_MODIFY: %s", event)
- self.handle_modified_file(event.dir, event.pathname, event.name)
-
- def handle_modified_file(self, dir, pathname, name):
- # if /etc/mtab is modified
- if pathname in self.mount_file:
- self.handle_mount_change()
- # update timestamp on create_dict for the entry with pathname as the key
- if pathname in self.create_dict:
- self.create_dict[pathname] = time.time()
- if not dir and not self.mmc.is_parent_directory(pathname, self.config.organize_directory):
- self.logger.info("Modified: %s", pathname)
- if self.mmc.is_audio_file(name):
- is_recorded = self.mmc.is_parent_directory(pathname, self.config.recorded_directory)
- self.file_events.append({'filepath': pathname, 'mode': self.config.MODE_MODIFY, 'is_recorded_show': is_recorded})
-
- # if change is detected on /etc/mtab, we check what mount(file system) was added/removed
- # and act accordingly
- def handle_mount_change(self):
- self.logger.info("Mount change detected, handling changes...");
- # take snapshot of mtab file and update currMtab and prevMtab
- # move currMtab to prevMtab and create new currMtab
- shutil.move(self.curr_mtab_file, self.prev_mtab_file)
- # create the file
- shutil.copy(self.mount_file, self.curr_mtab_file)
-
- d = difflib.Differ()
- curr_fh = open(self.curr_mtab_file, 'r')
- prev_fh = open(self.prev_mtab_file, 'r')
-
- diff = list(d.compare(prev_fh.readlines(), curr_fh.readlines()))
- added_mount_points = []
- removed_mount_points = []
-
- for dir in diff:
- info = dir.split(' ')
- if info[0] == '+':
- added_mount_points.append(info[2])
- elif info[0] == '-':
- removed_mount_points.append(info[2])
-
- self.logger.info("added: %s", added_mount_points)
- self.logger.info("removed: %s", removed_mount_points)
-
- # send current mount information to Airtime
- self.api_client.update_file_system_mount(added_mount_points, removed_mount_points);
-
- def handle_watched_dir_missing(self, dir):
- self.api_client.handle_watched_dir_missing(dir);
-
- #if a file is moved somewhere, this callback is run. With details about
- #where the file is being moved from. The corresponding process_IN_MOVED_TO
- #callback is only called if the destination of the file is also in a watched
- #directory.
- def process_IN_MOVED_FROM(self, event):
-
- #we don't care about files that have been moved from problem_directory
- if event.path in self.config.problem_directory:
- return
-
- self.logger.info("process_IN_MOVED_FROM: %s", event)
- if not event.dir:
- if event.pathname in self.temp_files:
- self.temp_files[event.cookie] = event.pathname
- elif not self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
- #we don't care about moved_from events from the organize dir.
- if self.mmc.is_audio_file(event.name):
- self.cookies_IN_MOVED_FROM[event.cookie] = (event, time.time())
- else:
- self.cookies_IN_MOVED_FROM[event.cookie] = (event, time.time())
-
- def process_IN_MOVED_TO(self, event):
- self.logger.info("process_IN_MOVED_TO: %s", event)
- # if /etc/mtab is modified
- filename = self.mount_file_dir + "/mtab"
- if event.pathname in filename:
- self.handle_mount_change()
-
- if event.path in self.config.problem_directory:
- return
-
- if not event.dir:
- if self.mmc.is_audio_file(event.name):
- if event.cookie in self.temp_files:
- self.file_events.append({'filepath': event.pathname, 'mode': self.config.MODE_MODIFY})
- del self.temp_files[event.cookie]
- elif event.cookie in self.cookies_IN_MOVED_FROM:
- #file's original location was also in a watched directory
- del self.cookies_IN_MOVED_FROM[event.cookie]
- if self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
-
-
-
- pathname = event.pathname
- #file was created in /srv/airtime/stor/organize. Need to process and move
- #to /srv/airtime/stor/imported
- file_md = self.md_manager.get_md_from_file(pathname)
- playable = self.mmc.test_file_playability(pathname)
-
- if file_md and playable:
- filepath = self.mmc.organize_new_file(pathname, file_md)
- else:
- #move to problem_files
- self.mmc.move_to_problem_dir(pathname)
-
-
-
- else:
- filepath = event.pathname
-
- if (filepath is not None):
- self.file_events.append({'filepath': filepath, 'mode': self.config.MODE_MOVED})
- else:
- #file's original location is from outside an inotify watched dir.
- pathname = event.pathname
- if self.mmc.is_parent_directory(pathname, self.config.organize_directory):
-
-
-
-
- #file was created in /srv/airtime/stor/organize. Need to process and move
- #to /srv/airtime/stor/imported
- file_md = self.md_manager.get_md_from_file(pathname)
- playable = self.mmc.test_file_playability(pathname)
-
- if file_md and playable:
- self.mmc.organize_new_file(pathname, file_md)
- else:
- #move to problem_files
- self.mmc.move_to_problem_dir(pathname)
-
-
-
-
- else:
- #show moved from unwatched folder into a watched folder. Do not "organize".
- is_recorded = self.mmc.is_parent_directory(event.pathname, self.config.recorded_directory)
- self.file_events.append({'mode': self.config.MODE_CREATE, 'filepath': event.pathname, 'is_recorded_show': is_recorded})
- else:
- #When we move a directory into a watched_dir, we only get a notification that the dir was created,
- #and no additional information about files that came along with that directory.
- #need to scan the entire directory for files.
-
- if event.cookie in self.cookies_IN_MOVED_FROM:
- del self.cookies_IN_MOVED_FROM[event.cookie]
- mode = self.config.MODE_MOVED
- else:
- mode = self.config.MODE_CREATE
-
- files = self.mmc.scan_dir_for_new_files(event.pathname)
- if self.mmc.is_parent_directory(event.pathname, self.config.organize_directory):
- for pathname in files:
-
-
-
- #file was created in /srv/airtime/stor/organize. Need to process and move
- #to /srv/airtime/stor/imported
- file_md = self.md_manager.get_md_from_file(pathname)
- playable = self.mmc.test_file_playability(pathname)
-
- if file_md and playable:
- self.mmc.organize_new_file(pathname, file_md)
- #self.file_events.append({'mode': mode, 'filepath': filepath, 'is_recorded_show': False})
- else:
- #move to problem_files
- self.mmc.move_to_problem_dir(pathname)
-
-
-
- else:
- for file in files:
- self.file_events.append({'mode': mode, 'filepath': file, 'is_recorded_show': False})
-
-
- def process_IN_DELETE(self, event):
- if event.path in self.mount_file_dir:
- return
- self.logger.info("process_IN_DELETE: %s", event)
- self.handle_removed_file(event.dir, event.pathname)
-
- def handle_removed_file(self, dir, pathname):
- self.logger.info("Deleting %s", pathname)
- if not dir:
- if self.mmc.is_audio_file(pathname):
- if pathname in self.ignore_event:
- self.logger.info("pathname in ignore event")
- self.ignore_event.remove(pathname)
- elif not self.mmc.is_parent_directory(pathname, self.config.organize_directory):
- self.logger.info("deleting a file not in organize")
- #we don't care if a file was deleted from the organize directory.
- self.file_events.append({'filepath': pathname, 'mode': self.config.MODE_DELETE})
-
-
- def process_default(self, event):
- pass
-
- def notifier_loop_callback(self, notifier):
- if len(self.file_events) > 0:
- for event in self.file_events:
- self.multi_queue.put(event)
- self.mmc.touch_index_file()
-
- self.file_events = []
-
- #yield to worker thread
- time.sleep(0)
-
- #use items() because we are going to be modifying this
- #dictionary while iterating over it.
- for k, pair in self.cookies_IN_MOVED_FROM.items():
- event = pair[0]
- timestamp = pair[1]
-
- timestamp_now = time.time()
-
- if timestamp_now - timestamp > 5:
- #in_moved_from event didn't have a corresponding
- #in_moved_to event in the last 5 seconds.
- #This means the file was moved to outside of the
- #watched directories. Let's handle this by deleting
- #it from the Airtime directory.
- del self.cookies_IN_MOVED_FROM[k]
- self.handle_removed_file(False, event.pathname)
-
- # we don't want create_dict grow infinitely
- # this part is like a garbage collector
- for k, t in self.create_dict.items():
- now = time.time()
- if now - t > 5:
- # check if file exist
- # When whole directory is copied to the organized dir,
- # inotify doesn't fire IN_CLOSE_WRITE, hench we need special way of
- # handling those cases. We are manully calling handle_created_file
- # function.
- if os.path.exists(k):
- # check if file is open
- try:
- command = "lsof " + k
- #f = os.popen(command)
- f = Popen(command, shell=True, stdout=PIPE).stdout
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
- continue
-
- if not f.readlines():
- self.logger.info("Handling file: %s", k)
- self.handle_created_file(False, k, os.path.basename(k))
- del self.create_dict[k]
- else:
- del self.create_dict[k]
-
- #check for any events received from Airtime.
- try:
- notifier.connection.drain_events(timeout=0.1)
- #avoid logging a bunch of timeout messages.
- except socket.timeout:
- pass
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
- time.sleep(3)
-
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediaconfig.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediaconfig.py
deleted file mode 100644
index 5f3865829..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediaconfig.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import sys
-
-from configobj import ConfigObj
-
-class AirtimeMediaConfig:
-
- MODE_CREATE = "create"
- MODE_MODIFY = "modify"
- MODE_MOVED = "moved"
- MODE_DELETE = "delete"
- MODE_DELETE_DIR = "delete_dir"
-
- def __init__(self, logger):
-
- # loading config file
- try:
- config = ConfigObj('/etc/airtime/airtime.conf')
- self.cfg = config
- except Exception, e:
- logger.info('Error loading config: ', e)
- sys.exit(1)
-
- self.storage_directory = None
-
-
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediamonitorcommon.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediamonitorcommon.py
deleted file mode 100644
index c298ccf84..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/mediamonitorcommon.py
+++ /dev/null
@@ -1,341 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import grp
-import pwd
-import logging
-import stat
-import subprocess
-import traceback
-
-from subprocess import Popen, PIPE
-from airtimemetadata import AirtimeMetadata
-import pyinotify
-
-class MediaMonitorCommon:
-
- timestamp_file = "/var/tmp/airtime/media-monitor/last_index"
- supported_file_formats = ['mp3', 'ogg']
-
- def __init__(self, airtime_config, wm=None):
- self.logger = logging.getLogger()
- self.config = airtime_config
- self.md_manager = AirtimeMetadata()
- self.wm = wm
-
-
- def clean_dirty_file_paths(self, dirty_files):
- """ clean dirty file paths by removing blanks and removing trailing/leading whitespace"""
- return filter(lambda e: len(e) > 0, [ f.strip(" \n") for f in dirty_files ])
-
- def find_command(self, directory, extra_arguments=""):
- """ Builds a find command that respects supported_file_formats list
- Note: Use single quotes to quote arguments """
- ext_globs = [ "-iname '*.%s'" % ext for ext in self.supported_file_formats ]
- find_glob = ' -o '.join(ext_globs)
- return "find '%s' %s %s" % (directory, find_glob, extra_arguments)
-
- def is_parent_directory(self, filepath, directory):
- filepath = os.path.normpath(filepath)
- directory = os.path.normpath(directory)
- return (directory == filepath[0:len(directory)])
-
- def is_temp_file(self, filename):
- info = filename.split(".")
- # if file doesn't have any extension, info[-2] throws exception
- # Hence, checking length of info before we do anything
- if(len(info) >= 2):
- return info[-2].lower() in self.supported_file_formats
- else:
- return False
-
- def is_audio_file(self, filename):
- info = filename.split(".")
- if len(info) < 2: return False # handle cases like filename="mp3"
- return info[-1].lower() in self.supported_file_formats
-
- #check if file is readable by "nobody"
- def is_user_readable(self, filepath, euid='nobody', egid='nogroup'):
- f = None
- try:
- uid = pwd.getpwnam(euid)[2]
- gid = grp.getgrnam(egid)[2]
- #drop root permissions and become "nobody"
- os.setegid(gid)
- os.seteuid(uid)
- f = open(filepath)
- readable = True
- except IOError:
- self.logger.warn("File does not have correct permissions: '%s'", filepath)
- readable = False
- except Exception, e:
- self.logger.error("Unexpected exception thrown: %s", e)
- readable = False
- self.logger.error("traceback: %s", traceback.format_exc())
- finally:
- #reset effective user to root
- if f: f.close()
- os.seteuid(0)
- os.setegid(0)
- return readable
-
- # the function only changes the permission if its not readable by www-data
- def is_readable(self, item, is_dir):
- try:
- return self.is_user_readable(item, 'www-data', 'www-data')
- except Exception:
- self.logger.warn(u"Failed to check owner/group/permissions for %s", item)
- return False
-
- def make_file_readable(self, pathname, is_dir):
- if is_dir:
- #set to 755
- os.chmod(pathname, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
- else:
- #set to 644
- os.chmod(pathname, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
-
- def make_readable(self, pathname):
- """
- Should only call this function if is_readable() returns False. This function
- will attempt to make the file world readable by modifying the file's permission's
- as well as the file's parent directory permissions. We should only call this function
- on files in Airtime's stor directory, not watched directories!
-
- Returns True if we were able to make the file world readable. False otherwise.
- """
- original_file = pathname
- is_dir = False
- try:
- while not self.is_readable(original_file, is_dir):
- #Not readable. Make appropriate permission changes.
- self.make_file_readable(pathname, is_dir)
-
- dirname = os.path.dirname(pathname)
- if dirname == pathname:
- #most likey reason for this is that we've hit '/'. Avoid infinite loop by terminating loop
- raise Exception()
- else:
- pathname = dirname
- is_dir = True
- except Exception:
- #something went wrong while we were trying to make world readable.
- return False
-
- return True
-
- #checks if path is a directory, and if it doesnt exist, then creates it.
- #Otherwise prints error to log file.
- def ensure_is_dir(self, directory):
- try:
- omask = os.umask(0)
- if not os.path.exists(directory):
- os.makedirs(directory, 02777)
- self.wm.add_watch(directory, pyinotify.ALL_EVENTS, rec=True, auto_add=True)
- elif not os.path.isdir(directory):
- #path exists but it is a file not a directory!
- self.logger.error(u"path %s exists, but it is not a directory!!!", directory)
- finally:
- os.umask(omask)
-
- #moves file from source to dest but also recursively removes the
- #the source file's parent directories if they are now empty.
- def move_file(self, source, dest):
- try:
- omask = os.umask(0)
- os.rename(source, dest)
- except Exception, e:
- self.logger.error("failed to move file. %s", e)
- self.logger.error("traceback: %s", traceback.format_exc())
- finally:
- os.umask(omask)
-
- dir = os.path.dirname(source)
- self.cleanup_empty_dirs(dir)
-
- #keep moving up the file hierarchy and deleting parent
- #directories until we hit a non-empty directory, or we
- #hit the organize dir.
- def cleanup_empty_dirs(self, dir):
- if os.path.normpath(dir) != self.config.organize_directory:
- if len(os.listdir(dir)) == 0:
- try:
- os.rmdir(dir)
- self.cleanup_empty_dirs(os.path.dirname(dir))
- except Exception:
- #non-critical exception because we probably tried to delete a non-empty dir.
- #Don't need to log this, let's just "return"
- pass
-
-
-
- #checks if path exists already in stor. If the path exists and the md5s are the
- #same just overwrite.
- def create_unique_filename(self, filepath, old_filepath):
-
- try:
- if(os.path.exists(filepath)):
- self.logger.info("Path %s exists", filepath)
-
- self.logger.info("Checking if md5s are the same.")
- md5_fp = self.md_manager.get_md5(filepath)
- md5_ofp = self.md_manager.get_md5(old_filepath)
-
- if(md5_fp == md5_ofp):
- self.logger.info("Md5s are the same, moving to same filepath.")
- return filepath
-
- self.logger.info("Md5s aren't the same, appending to filepath.")
- file_dir = os.path.dirname(filepath)
- filename = os.path.basename(filepath).split(".")[0]
- #will be in the format .ext
- file_ext = os.path.splitext(filepath)[1]
- i = 1;
- while(True):
- new_filepath = '%s/%s(%s)%s' % (file_dir, filename, i, file_ext)
- self.logger.error("Trying %s", new_filepath)
-
- if(os.path.exists(new_filepath)):
- i = i + 1;
- else:
- filepath = new_filepath
- break
-
- except Exception, e:
- self.logger.error("Exception %s", e)
-
- return filepath
-
- #create path in /srv/airtime/stor/imported/[song-metadata]
- def create_file_path(self, original_path, orig_md):
-
- storage_directory = self.config.storage_directory
- try:
- #will be in the format .ext
- file_ext = os.path.splitext(original_path)[1].lower()
- path_md = ['MDATA_KEY_TITLE', 'MDATA_KEY_CREATOR', 'MDATA_KEY_SOURCE', 'MDATA_KEY_TRACKNUMBER', 'MDATA_KEY_BITRATE']
-
- md = {}
- for m in path_md:
- if m not in orig_md:
- md[m] = u'unknown'
- else:
- #get rid of any "/" which will interfere with the filepath.
- if isinstance(orig_md[m], basestring):
- md[m] = orig_md[m].replace("/", "-")
- else:
- md[m] = orig_md[m]
-
- if 'MDATA_KEY_TRACKNUMBER' in orig_md:
- #make sure all track numbers are at least 2 digits long in the filepath.
- md['MDATA_KEY_TRACKNUMBER'] = "%02d" % (int(md['MDATA_KEY_TRACKNUMBER']))
-
- #format bitrate as 128kbps
- md['MDATA_KEY_BITRATE'] = str(md['MDATA_KEY_BITRATE'] / 1000) + "kbps"
-
- filepath = None
- #file is recorded by Airtime
- #/srv/airtime/stor/recorded/year/month/year-month-day-time-showname-bitrate.ext
- if(md['MDATA_KEY_CREATOR'] == u"Airtime Show Recorder"):
- #yyyy-mm-dd-hh-MM-ss
- y = orig_md['MDATA_KEY_YEAR'].split("-")
- filepath = u'%s/%s/%s/%s/%s-%s-%s%s' % (storage_directory, "recorded", y[0], y[1], orig_md['MDATA_KEY_YEAR'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
-
- #"Show-Title-2011-03-28-17:15:00"
- title = md['MDATA_KEY_TITLE'].split("-")
- show_hour = title[0]
- show_min = title[1]
- show_sec = title[2]
- show_name = '-'.join(title[3:])
-
- new_md = {}
- new_md['MDATA_KEY_FILEPATH'] = os.path.normpath(original_path)
- new_md['MDATA_KEY_TITLE'] = '%s-%s-%s:%s:%s' % (show_name, orig_md['MDATA_KEY_YEAR'], show_hour, show_min, show_sec)
- self.md_manager.save_md_to_file(new_md)
-
- elif(md['MDATA_KEY_TRACKNUMBER'] == u'unknown'):
- filepath = u'%s/%s/%s/%s/%s-%s%s' % (storage_directory, "imported", md['MDATA_KEY_CREATOR'], md['MDATA_KEY_SOURCE'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
- else:
- filepath = u'%s/%s/%s/%s/%s-%s-%s%s' % (storage_directory, "imported", md['MDATA_KEY_CREATOR'], md['MDATA_KEY_SOURCE'], md['MDATA_KEY_TRACKNUMBER'], md['MDATA_KEY_TITLE'], md['MDATA_KEY_BITRATE'], file_ext)
-
- filepath = self.create_unique_filename(filepath, original_path)
- self.logger.info('Unique filepath: %s', filepath)
- self.ensure_is_dir(os.path.dirname(filepath))
-
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
-
- return filepath
-
- def exec_command(self, command):
- p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
- stdout, stderr = p.communicate()
- if p.returncode != 0:
- self.logger.warn("command \n%s\n return with a non-zero return value", command)
- self.logger.error(stderr)
-
- try:
- """
- File name charset encoding is UTF-8.
- """
- stdout = stdout.decode("UTF-8")
- except Exception:
- stdout = None
- self.logger.error("Could not decode %s using UTF-8" % stdout)
-
- return stdout
-
- def scan_dir_for_new_files(self, dir):
- command = self.find_command(directory=dir, extra_arguments="-type f -readable")
- self.logger.debug(command)
- stdout = self.exec_command(command)
-
- if stdout is None:
- return []
- else:
- return stdout.splitlines()
-
- def touch_index_file(self):
- dirname = os.path.dirname(self.timestamp_file)
- try:
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- open(self.timestamp_file, "w")
- except Exception, e:
- self.logger.error('Exception: %s', e)
- self.logger.error("traceback: %s", traceback.format_exc())
-
- def organize_new_file(self, pathname, file_md):
- self.logger.info("Organizing new file: %s", pathname)
-
- filepath = self.create_file_path(pathname, file_md)
-
- self.logger.debug(u"Moving from %s to %s", pathname, filepath)
- self.move_file(pathname, filepath)
- self.make_readable(filepath)
- return filepath
-
- def test_file_playability(self, pathname):
- #when there is an single apostrophe inside of a string quoted by apostrophes, we can only escape it by replace that apostrophe
- #with '\''. This breaks the string into two, and inserts an escaped single quote in between them.
- #We run the command as pypo because otherwise the target file is opened with write permissions, and this causes an inotify ON_CLOSE_WRITE event
- #to be fired :/
- command = "sudo -u pypo airtime-liquidsoap -c 'output.dummy(audio_to_stereo(single(\"%s\")))' > /dev/null 2>&1" % pathname.replace("'", "'\\''")
- return_code = subprocess.call(command, shell=True)
- if return_code != 0:
- #print pathname for py-interpreter.log
- print pathname
- return (return_code == 0)
-
- def move_to_problem_dir(self, source):
- dest = os.path.join(self.config.problem_directory, os.path.basename(source))
- try:
- omask = os.umask(0)
- os.rename(source, dest)
- except Exception, e:
- self.logger.error("failed to move file. %s", e)
- self.logger.error("traceback: %s", traceback.format_exc())
- finally:
- os.umask(omask)
-
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/replaygain.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/replaygain.py
deleted file mode 100644
index f5c29a538..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/replaygain.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from subprocess import Popen, PIPE
-import re
-import os
-import sys
-import shutil
-import tempfile
-import logging
-
-
-logger = logging.getLogger()
-
-def get_process_output(command):
- """
- Run subprocess and return stdout
- """
- #logger.debug(command)
- p = Popen(command, shell=True, stdout=PIPE)
- return p.communicate()[0].strip()
-
-def run_process(command):
- """
- Run subprocess and return "return code"
- """
- p = Popen(command, shell=True)
- return os.waitpid(p.pid, 0)[1]
-
-def get_mime_type(file_path):
- """
- Attempts to get the mime type but will return prematurely if the process
- takes longer than 5 seconds. Note that this function should only be called
- for files which do not have a mp3/ogg/flac extension.
- """
-
- return get_process_output("timeout 5 file -b --mime-type %s" % file_path)
-
-def duplicate_file(file_path):
- """
- Makes a duplicate of the file and returns the path of this duplicate file.
- """
- fsrc = open(file_path, 'r')
- fdst = tempfile.NamedTemporaryFile(delete=False)
-
- #logger.info("Copying %s to %s" % (file_path, fdst.name))
-
- shutil.copyfileobj(fsrc, fdst)
-
- fsrc.close()
- fdst.close()
-
- return fdst.name
-
-def get_file_type(file_path):
- file_type = None
- if re.search(r'mp3$', file_path, re.IGNORECASE):
- file_type = 'mp3'
- elif re.search(r'og(g|a)$', file_path, re.IGNORECASE):
- file_type = 'vorbis'
- elif re.search(r'flac$', file_path, re.IGNORECASE):
- file_type = 'flac'
- elif re.search(r'(mp4|m4a)$', file_path, re.IGNORECASE):
- file_type = 'mp4'
- else:
- mime_type = get_mime_type(file_path) == "audio/mpeg"
- if 'mpeg' in mime_type:
- file_type = 'mp3'
- elif 'ogg' in mime_type or "oga" in mime_type:
- file_type = 'vorbis'
- elif 'flac' in mime_type:
- file_type = 'flac'
- elif 'mp4' in mime_type or "m4a" in mime_type:
- file_type = 'mp4'
-
- return file_type
-
-
-def calculate_replay_gain(file_path):
- """
- This function accepts files of type mp3/ogg/flac and returns a calculated
- ReplayGain value in dB. If the value cannot be calculated for some reason,
- then we default to 0 (Unity Gain).
-
- http://wiki.hydrogenaudio.org/index.php?title=ReplayGain_1.0_specification
- """
-
- try:
- """
- Making a duplicate is required because the ReplayGain extraction
- utilities we use make unwanted modifications to the file.
- """
-
- search = None
- temp_file_path = duplicate_file(file_path)
-
- file_type = get_file_type(file_path)
-
- if file_type:
- if file_type == 'mp3':
- if run_process("which mp3gain > /dev/null") == 0:
- out = get_process_output('mp3gain -q "%s" 2> /dev/null' % temp_file_path)
- search = re.search(r'Recommended "Track" dB change: (.*)', out)
- else:
- logger.warn("mp3gain not found")
- elif file_type == 'vorbis':
- if run_process("which vorbisgain > /dev/null && which ogginfo > /dev/null") == 0:
- run_process('vorbisgain -q -f "%s" 2>/dev/null >/dev/null' % temp_file_path)
- out = get_process_output('ogginfo "%s"' % temp_file_path)
- search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
- else:
- logger.warn("vorbisgain/ogginfo not found")
- elif file_type == 'flac':
- if run_process("which metaflac > /dev/null") == 0:
- out = get_process_output('metaflac --show-tag=REPLAYGAIN_TRACK_GAIN "%s"' % temp_file_path)
- search = re.search(r'REPLAYGAIN_TRACK_GAIN=(.*) dB', out)
- else:
- logger.warn("metaflac not found")
- elif file_type == 'mp4':
- if run_process("which aacgain > /dev/null") == 0:
- out = get_process_output('aacgain -q "%s" 2> /dev/null' % temp_file_path)
- search = re.search(r'Recommended "Track" dB change: (.*)', out)
- else:
- logger.warn("aacgain not found")
- else:
- pass
-
- #no longer need the temp, file simply remove it.
- os.remove(temp_file_path)
- except Exception, e:
- logger.error(str(e))
-
- replay_gain = 0
- if search:
- matches = search.groups()
- if len(matches) == 1:
- replay_gain = matches[0]
-
- return replay_gain
-
-
-# Example of running from command line:
-# python replay_gain.py /path/to/filename.mp3
-if __name__ == "__main__":
- print calculate_replay_gain(sys.argv[1])
diff --git a/python_apps/media-monitor/media_monitor/airtimefilemonitor/workerprocess.py b/python_apps/media-monitor/media_monitor/airtimefilemonitor/workerprocess.py
deleted file mode 100644
index 73bde5045..000000000
--- a/python_apps/media-monitor/media_monitor/airtimefilemonitor/workerprocess.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import traceback
-import os
-
-class MediaMonitorWorkerProcess:
-
- def __init__(self, config, mmc):
- self.config = config
- self.mmc = mmc
-
- #this function is run in its own process, and continuously
- #checks the queue for any new file events.
- def process_file_events(self, queue, notifier):
- while True:
- try:
- event = queue.get()
- notifier.logger.info("received event %s", event)
- notifier.update_airtime(event)
- except Exception, e:
- notifier.logger.error(e)
- notifier.logger.error("traceback: %s", traceback.format_exc())
diff --git a/python_apps/media-monitor/media_monitor/mm1.py b/python_apps/media-monitor/media_monitor/mm1.py
deleted file mode 100644
index 615b1c7d1..000000000
--- a/python_apps/media-monitor/media_monitor/mm1.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import time
-import logging.config
-import sys
-import os
-import traceback
-import locale
-
-from configobj import ConfigObj
-
-from api_clients import api_client as apc
-from std_err_override import LogWriter
-
-from multiprocessing import Queue as mpQueue
-
-from threading import Thread
-
-from pyinotify import WatchManager
-
-from airtimefilemonitor.airtimenotifier import AirtimeNotifier
-from airtimefilemonitor.mediamonitorcommon import MediaMonitorCommon
-from airtimefilemonitor.airtimeprocessevent import AirtimeProcessEvent
-from airtimefilemonitor.mediaconfig import AirtimeMediaConfig
-from airtimefilemonitor.workerprocess import MediaMonitorWorkerProcess
-from airtimefilemonitor.airtimemediamonitorbootstrap import AirtimeMediaMonitorBootstrap
-
-def configure_locale():
- logger.debug("Before %s", locale.nl_langinfo(locale.CODESET))
- current_locale = locale.getlocale()
-
- if current_locale[1] is None:
- logger.debug("No locale currently set. Attempting to get default locale.")
- default_locale = locale.getdefaultlocale()
-
- if default_locale[1] is None:
- logger.debug("No default locale exists. Let's try loading from /etc/default/locale")
- if os.path.exists("/etc/default/locale"):
- config = ConfigObj('/etc/default/locale')
- lang = config.get('LANG')
- new_locale = lang
- else:
- logger.error("/etc/default/locale could not be found! Please run 'sudo update-locale' from command-line.")
- sys.exit(1)
- else:
- new_locale = default_locale
-
- logger.info("New locale set to: %s", locale.setlocale(locale.LC_ALL, new_locale))
-
-
-
- reload(sys)
- sys.setdefaultencoding("UTF-8")
- current_locale_encoding = locale.getlocale()[1].lower()
- logger.debug("sys default encoding %s", sys.getdefaultencoding())
- logger.debug("After %s", locale.nl_langinfo(locale.CODESET))
-
- if current_locale_encoding not in ['utf-8', 'utf8']:
- logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding)
- sys.exit(1)
-
-# configure logging
-try:
- logging.config.fileConfig("%s/logging.cfg" % os.path.dirname(os.path.realpath(__file__)))
-
- #need to wait for Python 2.7 for this..
- #logging.captureWarnings(True)
-
- logger = logging.getLogger()
- LogWriter.override_std_err(logger)
-
-except Exception, e:
- print 'Error configuring logging: ', e
- sys.exit(1)
-
-logger.info("\n\n*** Media Monitor bootup ***\n\n")
-
-
-try:
- configure_locale()
-
- config = AirtimeMediaConfig(logger)
- api_client = apc.AirtimeApiClient()
- api_client.register_component("media-monitor")
-
- logger.info("Setting up monitor")
- response = None
- while response is None:
- response = api_client.setup_media_monitor()
- time.sleep(5)
-
- storage_directory = response["stor"]
- watched_dirs = response["watched_dirs"]
- logger.info("Storage Directory is: %s", storage_directory)
- config.storage_directory = os.path.normpath(storage_directory)
- config.imported_directory = os.path.normpath(os.path.join(storage_directory, 'imported'))
- config.organize_directory = os.path.normpath(os.path.join(storage_directory, 'organize'))
- config.recorded_directory = os.path.normpath(os.path.join(storage_directory, 'recorded'))
- config.problem_directory = os.path.normpath(os.path.join(storage_directory, 'problem_files'))
-
- dirs = [config.imported_directory, config.organize_directory, config.recorded_directory, config.problem_directory]
- for d in dirs:
- if not os.path.exists(d):
- os.makedirs(d, 02775)
-
- multi_queue = mpQueue()
- logger.info("Initializing event processor")
-
- wm = WatchManager()
- mmc = MediaMonitorCommon(config, wm=wm)
- pe = AirtimeProcessEvent(queue=multi_queue, airtime_config=config, wm=wm, mmc=mmc, api_client=api_client)
-
- bootstrap = AirtimeMediaMonitorBootstrap(logger, pe, api_client, mmc, wm, config)
- bootstrap.scan()
-
- notifier = AirtimeNotifier(wm, pe, read_freq=0, timeout=0, airtime_config=config, api_client=api_client, bootstrap=bootstrap, mmc=mmc)
- notifier.coalesce_events()
-
- #create 5 worker threads
- wp = MediaMonitorWorkerProcess(config, mmc)
- for i in range(5):
- threadName = "Thread #%d" % i
- t = Thread(target=wp.process_file_events, name=threadName, args=(multi_queue, notifier))
- t.start()
-
- wdd = notifier.watch_directory(storage_directory)
- logger.info("Added watch to %s", storage_directory)
- logger.info("wdd result %s", wdd[storage_directory])
-
- for dir in watched_dirs:
- wdd = notifier.watch_directory(dir)
- logger.info("Added watch to %s", dir)
- logger.info("wdd result %s", wdd[dir])
-
- notifier.loop(callback=pe.notifier_loop_callback)
-
-except KeyboardInterrupt:
- notifier.stop()
- logger.info("Keyboard Interrupt")
-except Exception, e:
- logger.error('Exception: %s', e)
- logger.error("traceback: %s", traceback.format_exc())
diff --git a/python_apps/media-monitor/mm2/__init__.py b/python_apps/media-monitor/mm2/__init__.py
deleted file mode 100644
index 8b1378917..000000000
--- a/python_apps/media-monitor/mm2/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python_apps/media-monitor/mm2/baby.py b/python_apps/media-monitor/mm2/baby.py
deleted file mode 100644
index c2e92afb7..000000000
--- a/python_apps/media-monitor/mm2/baby.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-import re
-from media.saas.launcher import setup_logger, setup_global, MM2
-from media.saas.airtimeinstance import AirtimeInstance
-from os.path import isdir, join, abspath, exists, dirname
-from os import listdir
-
-def list_dirs(d): return (x for x in listdir(d) if isdir(join(d,x)))
-
-def filter_instance(d): return bool(re.match('.+\d+$',d))
-
-def get_name(p): return re.match('.+/(\d+)$',p).group(1)
-
-def filter_instances(l): return (x for x in l if filter_instance(x))
-
-def autoscan_instances(main_cfg):
- root = main_cfg['instance_root']
- instances = []
- for instance_machine in list_dirs(root):
- instance_machine = join(root, instance_machine)
- for instance_root in filter_instances(list_dirs(instance_machine)):
- full_path = abspath(join(instance_machine,instance_root))
- ai = AirtimeInstance.root_make(get_name(full_path), full_path)
- instances.append(ai)
- return instances
-
-def verify_exists(p):
- if not exists(p): raise Exception("%s must exist" % p)
-
-def main(main_cfg):
- log_config, log_path = main_cfg['log_config'], main_cfg['log_path']
- verify_exists(log_config)
- log = setup_logger(log_config, log_path)
- setup_global(log)
- for instance in autoscan_instances(main_cfg):
- print("Launching instance: %s" % str(instance))
- #MM2(instance).start()
- print("Launched all instances")
-
-if __name__ == '__main__':
- pwd = dirname(__file__)
- default = {
- 'log_path' : join(pwd, 'test.log'), # config for log
- 'log_config' : join(pwd, 'configs/logging.cfg'), # where to log
- # root dir of all instances
- 'instance_root' : '/mnt/airtimepro/instances'
- }
- main(default)
diff --git a/python_apps/media-monitor/mm2/configs/__init__.py b/python_apps/media-monitor/mm2/configs/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/python_apps/media-monitor/mm2/configs/logging.cfg b/python_apps/media-monitor/mm2/configs/logging.cfg
deleted file mode 100644
index ea24f69e0..000000000
--- a/python_apps/media-monitor/mm2/configs/logging.cfg
+++ /dev/null
@@ -1,32 +0,0 @@
-[loggers]
-keys= root,notifier,metadata
-
-[handlers]
-keys=fileOutHandler
-
-[formatters]
-keys=simpleFormatter
-
-[logger_root]
-level=DEBUG
-handlers=fileOutHandler
-
-[logger_notifier]
-level=DEBUG
-handlers=fileOutHandler
-qualname=notifier
-
-[logger_metadata]
-level=DEBUG
-handlers=fileOutHandler
-qualname=metadata
-
-[handler_fileOutHandler]
-class=logging.handlers.RotatingFileHandler
-level=DEBUG
-formatter=simpleFormatter
-args=("/var/log/airtime/media-monitor/media-monitor.log", 'a', 10000000, 5,)
-
-[formatter_simpleFormatter]
-format=%(asctime)s %(levelname)s - [%(threadName)s] [%(filename)s : %(funcName)s()] : LINE %(lineno)d - %(message)s
-datefmt=
diff --git a/python_apps/media-monitor/mm2/media/__init__.py b/python_apps/media-monitor/mm2/media/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/python_apps/media-monitor/mm2/media/metadata/__init__.py b/python_apps/media-monitor/mm2/media/metadata/__init__.py
deleted file mode 100644
index 8b1378917..000000000
--- a/python_apps/media-monitor/mm2/media/metadata/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/python_apps/media-monitor/mm2/media/metadata/definitions.py b/python_apps/media-monitor/mm2/media/metadata/definitions.py
deleted file mode 100644
index 883a20acb..000000000
--- a/python_apps/media-monitor/mm2/media/metadata/definitions.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# -*- coding: utf-8 -*-
-import process as md
-import re
-from os.path import normpath
-from ..monitor.pure import format_length, file_md5, is_airtime_recorded, \
- no_extension_basename
-
-defs_loaded = False
-
-MAX_SIGNED_INT = 2**31-1
-
-def is_defs_loaded():
- global defs_loaded
- return defs_loaded
-
-def load_definitions():
- with md.metadata('MDATA_KEY_DURATION') as t:
- t.default(u'0.0')
- t.depends('length')
- t.translate(lambda k: format_length(k['length']))
-
- with md.metadata('MDATA_KEY_CUE_IN') as t:
- t.default(u'0.0')
- t.depends('cuein')
- t.translate(lambda k: format_length(k['cuein']))
-
- with md.metadata('MDATA_KEY_CUE_OUT') as t:
- t.default(u'0.0')
- t.depends('cueout')
- t.translate(lambda k: format_length(k['cueout']))
-
- with md.metadata('MDATA_KEY_MIME') as t:
- t.default(u'')
- t.depends('mime')
- # Is this necessary?
- t.translate(lambda k: k['mime'].replace('audio/vorbis','audio/ogg'))
-
- with md.metadata('MDATA_KEY_BITRATE') as t:
- t.default(u'')
- t.depends('bitrate')
- t.translate(lambda k: k['bitrate'])
- t.max_value(MAX_SIGNED_INT)
-
- with md.metadata('MDATA_KEY_SAMPLERATE') as t:
- t.default(u'0')
- t.depends('sample_rate')
- t.translate(lambda k: k['sample_rate'])
- t.max_value(MAX_SIGNED_INT)
-
- with md.metadata('MDATA_KEY_FTYPE') as t:
- t.depends('ftype') # i don't think this field even exists
- t.default(u'audioclip')
- t.translate(lambda k: k['ftype']) # but just in case
-
- with md.metadata("MDATA_KEY_CREATOR") as t:
- t.depends("artist")
- # A little kludge to make sure that we have some value for when we parse
- # MDATA_KEY_TITLE
- t.default(u"")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_SOURCE") as t:
- t.depends("album")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_GENRE") as t:
- t.depends("genre")
- t.max_length(64)
-
- with md.metadata("MDATA_KEY_MOOD") as t:
- t.depends("mood")
- t.max_length(64)
-
- with md.metadata("MDATA_KEY_TRACKNUMBER") as t:
- t.depends("tracknumber")
- t.max_value(MAX_SIGNED_INT)
-
- with md.metadata("MDATA_KEY_BPM") as t:
- t.depends("bpm")
- t.max_value(MAX_SIGNED_INT)
-
- with md.metadata("MDATA_KEY_LABEL") as t:
- t.depends("organization")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_COMPOSER") as t:
- t.depends("composer")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_ENCODER") as t:
- t.depends("encodedby")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_CONDUCTOR") as t:
- t.depends("conductor")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_YEAR") as t:
- t.depends("date")
- t.max_length(16)
-
- with md.metadata("MDATA_KEY_URL") as t:
- t.depends("website")
-
- with md.metadata("MDATA_KEY_ISRC") as t:
- t.depends("isrc")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_COPYRIGHT") as t:
- t.depends("copyright")
- t.max_length(512)
-
- with md.metadata("MDATA_KEY_ORIGINAL_PATH") as t:
- t.depends('path')
- t.translate(lambda k: unicode(normpath(k['path'])))
-
- with md.metadata("MDATA_KEY_MD5") as t:
- t.depends('path')
- t.optional(False)
- t.translate(lambda k: file_md5(k['path'], max_length=100))
-
- # owner is handled differently by (by events.py)
-
- # MDATA_KEY_TITLE is the annoying special case b/c we sometimes read it
- # from file name
-
-
- # must handle 3 cases:
- # 1. regular case (not recorded + title is present)
- # 2. title is absent (read from file)
- # 3. recorded file
- def tr_title(k):
- #unicode_unknown = u"unknown"
- new_title = u""
- if is_airtime_recorded(k) or k['title'] != u"":
- new_title = k['title']
- else:
- default_title = no_extension_basename(k['path'])
- default_title = re.sub(r'__\d+\.',u'.', default_title)
-
- # format is: track_number-title-123kbps.mp3
- m = re.match(".+?-(?P<title>.+)-(\d+kbps|unknown)$", default_title)
- if m: new_title = m.group('title')
- else: new_title = re.sub(r'-\d+kbps$', u'', default_title)
-
- return new_title
-
- with md.metadata('MDATA_KEY_TITLE') as t:
- # Need to know MDATA_KEY_CREATOR to know if show was recorded. Value is
- # defaulted to "" from definitions above
- t.depends('title','MDATA_KEY_CREATOR','path')
- t.optional(False)
- t.translate(tr_title)
- t.max_length(512)
-
- with md.metadata('MDATA_KEY_LABEL') as t:
- t.depends('label')
- t.max_length(512)
diff --git a/python_apps/media-monitor/mm2/media/metadata/process.py b/python_apps/media-monitor/mm2/media/metadata/process.py
deleted file mode 100644
index 178681cd8..000000000
--- a/python_apps/media-monitor/mm2/media/metadata/process.py
+++ /dev/null
@@ -1,237 +0,0 @@
-# -*- coding: utf-8 -*-
-from contextlib import contextmanager
-from ..monitor.pure import truncate_to_value, truncate_to_length, toposort
-from os.path import normpath
-from ..monitor.exceptions import BadSongFile
-from ..monitor.log import Loggable
-from ..monitor import pure as mmp
-from collections import namedtuple
-import mutagen
-import subprocess
-import json
-import logging
-
-class FakeMutagen(dict):
- """
- Need this fake mutagen object so that airtime_special functions
- return a proper default value instead of throwing an exceptions for
- files that mutagen doesn't recognize
- """
- FakeInfo = namedtuple('FakeInfo','length bitrate')
- def __init__(self,path):
- self.path = path
- self.mime = ['audio/wav']
- self.info = FakeMutagen.FakeInfo(0.0, '')
- dict.__init__(self)
- def set_length(self,l):
- old_bitrate = self.info.bitrate
- self.info = FakeMutagen.FakeInfo(l, old_bitrate)
-
-
-class MetadataAbsent(Exception):
- def __init__(self, name): self.name = name
- def __str__(self): return "Could not obtain element '%s'" % self.name
-
-class MetadataElement(Loggable):
-
- def __init__(self,name):
- self.name = name
- # "Sane" defaults
- self.__deps = set()
- self.__normalizer = lambda x: x
- self.__optional = True
- self.__default = None
- self.__is_normalized = lambda _ : True
- self.__max_length = -1
- self.__max_value = -1
- self.__translator = None
-
- def max_length(self,l):
- self.__max_length = l
-
- def max_value(self,v):
- self.__max_value = v
-
- def optional(self, setting):
- self.__optional = setting
-
- def is_optional(self):
- return self.__optional
-
- def depends(self, *deps):
- self.__deps = set(deps)
-
- def dependencies(self):
- return self.__deps
-
- def translate(self, f):
- self.__translator = f
-
- def is_normalized(self, f):
- self.__is_normalized = f
-
- def normalize(self, f):
- self.__normalizer = f
-
- def default(self,v):
- self.__default = v
-
- def get_default(self):
- if hasattr(self.__default, '__call__'): return self.__default()
- else: return self.__default
-
- def has_default(self):
- return self.__default is not None
-
- def path(self):
- return self.__path
-
- def __slice_deps(self, d):
- """
- returns a dictionary of all the key value pairs in d that are also
- present in self.__deps
- """
- return dict( (k,v) for k,v in d.iteritems() if k in self.__deps)
-
- def __str__(self):
- return "%s(%s)" % (self.name, ' '.join(list(self.__deps)))
-
- def read_value(self, path, original, running={}):
-
- # If value is present and normalized then we only check if it's
- # normalized or not. We normalize if it's not normalized already
-
- if self.name in original:
- v = original[self.name]
- if self.__is_normalized(v): return v
- else: return self.__normalizer(v)
-
- # We slice out only the dependencies that are required for the metadata
- # element.
- dep_slice_orig = self.__slice_deps(original)
- dep_slice_running = self.__slice_deps(running)
- # TODO : remove this later
- dep_slice_special = self.__slice_deps({'path' : path})
- # We combine all required dependencies into a single dictionary
- # that we will pass to the translator
- full_deps = dict( dep_slice_orig.items()
- + dep_slice_running.items()
- + dep_slice_special.items())
-
- # check if any dependencies are absent
- # note: there is no point checking the case that len(full_deps) >
- # len(self.__deps) because we make sure to "slice out" any supefluous
- # dependencies above.
- if len(full_deps) != len(self.dependencies()) or \
- len(self.dependencies()) == 0:
- # If we have a default value then use that. Otherwise throw an
- # exception
- if self.has_default(): return self.get_default()
- else: raise MetadataAbsent(self.name)
-
- # We have all dependencies. Now for actual for parsing
- def def_translate(dep):
- def wrap(k):
- e = [ x for x in dep ][0]
- return k[e]
- return wrap
-
- # Only case where we can select a default translator
- if self.__translator is None:
- self.translate(def_translate(self.dependencies()))
- if len(self.dependencies()) > 2: # dependencies include themselves
- self.logger.info("Ignoring some dependencies in translate %s"
- % self.name)
- self.logger.info(self.dependencies())
-
- r = self.__normalizer( self.__translator(full_deps) )
- if self.__max_length != -1:
- r = truncate_to_length(r, self.__max_length)
- if self.__max_value != -1:
- try: r = truncate_to_value(r, self.__max_value)
- except ValueError, e: r = ''
- return r
-
-def normalize_mutagen(path):
- """
- Consumes a path and reads the metadata using mutagen. normalizes some of
- the metadata that isn't read through the mutagen hash
- """
- if not mmp.file_playable(path): raise BadSongFile(path)
- try : m = mutagen.File(path, easy=True)
- except Exception : raise BadSongFile(path)
- if m is None: m = FakeMutagen(path)
- try:
- if mmp.extension(path) == 'wav':
- m.set_length(mmp.read_wave_duration(path))
- except Exception: raise BadSongFile(path)
- md = {}
- for k,v in m.iteritems():
- if type(v) is list:
- if len(v) > 0: md[k] = v[0]
- else: md[k] = v
- # populate special metadata values
- md['length'] = getattr(m.info, 'length', 0.0)
- md['bitrate'] = getattr(m.info, 'bitrate', u'')
- md['sample_rate'] = getattr(m.info, 'sample_rate', 0)
- md['mime'] = m.mime[0] if len(m.mime) > 0 else u''
- md['path'] = normpath(path)
-
- # silence detect(set default cue in and out)
- #try:
- #command = ['silan', '-b', '-f', 'JSON', md['path']]
- #proc = subprocess.Popen(command, stdout=subprocess.PIPE)
- #out = proc.communicate()[0].strip('\r\n')
-
- #info = json.loads(out)
- #md['cuein'] = info['sound'][0][0]
- #md['cueout'] = info['sound'][0][1]
- #except Exception:
- #self.logger.debug('silan is missing')
-
- if 'title' not in md: md['title'] = u''
- return md
-
-
-class OverwriteMetadataElement(Exception):
- def __init__(self, m): self.m = m
- def __str__(self): return "Trying to overwrite: %s" % self.m
-
-class MetadataReader(object):
- def __init__(self):
- self.clear()
-
- def register_metadata(self,m):
- if m in self.__mdata_name_map:
- raise OverwriteMetadataElement(m)
- self.__mdata_name_map[m.name] = m
- d = dict( (name,m.dependencies()) for name,m in
- self.__mdata_name_map.iteritems() )
- new_list = list( toposort(d) )
- self.__metadata = [ self.__mdata_name_map[name] for name in new_list
- if name in self.__mdata_name_map]
-
- def clear(self):
- self.__mdata_name_map = {}
- self.__metadata = []
-
- def read(self, path, muta_hash):
- normalized_metadata = {}
- for mdata in self.__metadata:
- try:
- normalized_metadata[mdata.name] = mdata.read_value(
- path, muta_hash, normalized_metadata)
- except MetadataAbsent:
- if not mdata.is_optional(): raise
- return normalized_metadata
-
- def read_mutagen(self, path):
- return self.read(path, normalize_mutagen(path))
-
-global_reader = MetadataReader()
-
-@contextmanager
-def metadata(name):
- t = MetadataElement(name)
- yield t
- global_reader.register_metadata(t)
diff --git a/python_apps/media-monitor/mm2/media/monitor/__init__.py b/python_apps/media-monitor/mm2/media/monitor/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/python_apps/media-monitor/mm2/media/monitor/airtime.py b/python_apps/media-monitor/mm2/media/monitor/airtime.py
deleted file mode 100644
index ce0e581d9..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/airtime.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# -*- coding: utf-8 -*-
-from kombu.messaging import Exchange, Queue, Consumer
-from kombu.connection import BrokerConnection
-from kombu.simple import SimpleQueue
-from os.path import normpath
-
-import json
-import os
-import copy
-import time
-
-from exceptions import BadSongFile, InvalidMetadataElement, DirectoryIsNotListed
-from metadata import Metadata
-from log import Loggable
-from syncdb import AirtimeDB
-from bootstrap import Bootstrapper
-
-from ..saas.thread import apc, user
-
-class AirtimeNotifier(Loggable):
- """
- AirtimeNotifier is responsible for interecepting RabbitMQ messages and
- feeding them to the event_handler object it was initialized with. The only
- thing it does to the messages is parse them from json
- """
- def __init__(self, cfg, message_receiver):
- self.cfg = cfg
- self.handler = message_receiver
- while not self.init_rabbit_mq():
- self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
- time.sleep(5)
-
- def init_rabbit_mq(self):
- try:
- self.logger.info("Initializing RabbitMQ message consumer...")
- schedule_exchange = Exchange("airtime-media-monitor", "direct",
- durable=True, auto_delete=True)
- schedule_queue = Queue("media-monitor", exchange=schedule_exchange,
- key="filesystem")
- self.connection = BrokerConnection(self.cfg["rabbitmq"]["host"],
- self.cfg["rabbitmq"]["user"], self.cfg["rabbitmq"]["password"],
- self.cfg["rabbitmq"]["vhost"])
- channel = self.connection.channel()
-
- self.simple_queue = SimpleQueue(channel, schedule_queue)
-
- self.logger.info("Initialized RabbitMQ consumer.")
- except Exception as e:
- self.logger.info("Failed to initialize RabbitMQ consumer")
- self.logger.error(e)
- return False
-
- return True
-
-
- def handle_message(self, message):
- """
- Messages received from RabbitMQ are handled here. These messages
- instruct media-monitor of events such as a new directory being watched,
- file metadata has been changed, or any other changes to the config of
- media-monitor via the web UI.
- """
- self.logger.info("Received md from RabbitMQ: %s" % str(message))
- m = json.loads(message)
- # TODO : normalize any other keys that could be used to pass
- # directories
- if 'directory' in m: m['directory'] = normpath(m['directory'])
- self.handler.message(m)
-
-class AirtimeMessageReceiver(Loggable):
- def __init__(self, cfg, manager):
- self.dispatch_table = {
- 'md_update' : self.md_update,
- 'new_watch' : self.new_watch,
- 'remove_watch' : self.remove_watch,
- 'rescan_watch' : self.rescan_watch,
- 'change_stor' : self.change_storage,
- 'file_delete' : self.file_delete,
- }
- self.cfg = cfg
- self.manager = manager
-
- def message(self, msg):
- """
- This method is called by an AirtimeNotifier instance that
- consumes the Rabbit MQ events that trigger this. The method
- return true when the event was executed and false when it wasn't.
- """
- msg = copy.deepcopy(msg)
- if msg['event_type'] in self.dispatch_table:
- evt = msg['event_type']
- del msg['event_type']
- self.logger.info("Handling RabbitMQ message: '%s'" % evt)
- self._execute_message(evt,msg)
- return True
- else:
- self.logger.info("Received invalid message with 'event_type': '%s'"
- % msg['event_type'])
- self.logger.info("Message details: %s" % str(msg))
- return False
- def _execute_message(self,evt,message):
- self.dispatch_table[evt](message)
-
- def __request_now_bootstrap(self, directory_id=None, directory=None,
- all_files=True):
- if (not directory_id) and (not directory):
- raise ValueError("You must provide either directory_id or \
- directory")
- sdb = AirtimeDB(apc())
- if directory : directory = os.path.normpath(directory)
- if directory_id == None : directory_id = sdb.to_id(directory)
- if directory == None : directory = sdb.to_directory(directory_id)
- try:
- bs = Bootstrapper( sdb, self.manager.watch_signal() )
- bs.flush_watch( directory=directory, last_ran=self.cfg.last_ran() )
- except Exception as e:
- self.fatal_exception("Exception bootstrapping: (dir,id)=(%s,%s)" %
- (directory, directory_id), e)
- raise DirectoryIsNotListed(directory, cause=e)
-
- def md_update(self, msg):
- self.logger.info("Updating metadata for: '%s'" %
- msg['MDATA_KEY_FILEPATH'])
- md_path = msg['MDATA_KEY_FILEPATH']
- try: Metadata.write_unsafe(path=md_path, md=msg)
- except BadSongFile as e:
- self.logger.info("Cannot find metadata file: '%s'" % e.path)
- except InvalidMetadataElement as e:
- self.logger.info("Metadata instance not supported for this file '%s'" \
- % e.path)
- self.logger.info(str(e))
- except Exception as e:
- # TODO : add md_path to problem path or something?
- self.fatal_exception("Unknown error when writing metadata to: '%s'"
- % md_path, e)
-
- def new_watch(self, msg, restart=False):
- msg['directory'] = normpath(msg['directory'])
- self.logger.info("Creating watch for directory: '%s'" %
- msg['directory'])
- if not os.path.exists(msg['directory']):
- try: os.makedirs(msg['directory'])
- except Exception as e:
- self.fatal_exception("Failed to create watched dir '%s'" %
- msg['directory'],e)
- else:
- self.logger.info("Created new watch directory: '%s'" %
- msg['directory'])
- self.new_watch(msg)
- else:
- self.__request_now_bootstrap( directory=msg['directory'],
- all_files=restart)
- self.manager.add_watch_directory(msg['directory'])
-
- def remove_watch(self, msg):
- msg['directory'] = normpath(msg['directory'])
- self.logger.info("Removing watch from directory: '%s'" %
- msg['directory'])
- self.manager.remove_watch_directory(msg['directory'])
-
- def rescan_watch(self, msg):
- self.logger.info("Trying to rescan watched directory: '%s'" %
- msg['directory'])
- try:
- # id is always an integer but in the dictionary the key is always a
- # string
- self.__request_now_bootstrap( unicode(msg['id']) )
- except DirectoryIsNotListed as e:
- self.fatal_exception("Bad rescan request", e)
- except Exception as e:
- self.fatal_exception("Bad rescan request. Unknown error.", e)
- else:
- self.logger.info("Successfully re-scanned: '%s'" % msg['directory'])
-
- def change_storage(self, msg):
- new_storage_directory = msg['directory']
- self.manager.change_storage_root(new_storage_directory)
- for to_bootstrap in [ self.manager.get_recorded_path(),
- self.manager.get_imported_path() ]:
- self.__request_now_bootstrap( directory=to_bootstrap )
-
- def file_delete(self, msg):
- # Deletes should be requested only from imported folder but we
- # don't verify that. Security risk perhaps?
- # we only delete if we are passed the special delete flag that is
- # necessary with every "delete_file" request
- if not msg['delete']:
- self.logger.info("No clippy confirmation, ignoring event. \
- Out of curiousity we will print some details.")
- self.logger.info(msg)
- return
- # TODO : Add validation that we are deleting a file that's under our
- # surveillance. We don't to delete some random system file.
- if os.path.exists(msg['filepath']):
- try:
- self.logger.info("Attempting to delete '%s'" %
- msg['filepath'])
- # We use FileMediator to ignore any paths with
- # msg['filepath'] so that we do not send a duplicate delete
- # request that we'd normally get form pyinotify. But right
- # now event contractor would take care of this sort of
- # thing anyway so this might not be necessary after all
- #user().file_mediator.ignore(msg['filepath'])
- os.unlink(msg['filepath'])
- # Verify deletion:
- if not os.path.exists(msg['filepath']):
- self.logger.info("Successfully deleted: '%s'" %
- msg['filepath'])
- except Exception as e:
- self.fatal_exception("Failed to delete '%s'" % msg['filepath'],
- e)
- else: # validation for filepath existence failed
- self.logger.info("Attempting to delete file '%s' that does not \
- exist. Full request:" % msg['filepath'])
- self.logger.info(msg)
diff --git a/python_apps/media-monitor/mm2/media/monitor/bootstrap.py b/python_apps/media-monitor/mm2/media/monitor/bootstrap.py
deleted file mode 100644
index 4ae6d0140..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/bootstrap.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import os
-from pydispatch import dispatcher
-from events import NewFile, DeleteFile, ModifyFile
-from log import Loggable
-from ..saas.thread import getsig
-import pure as mmp
-
-class Bootstrapper(Loggable):
- """
- Bootstrapper reads all the info in the filesystem flushes organize events
- and watch events
- """
- def __init__(self,db,watch_signal):
- """
- db - AirtimeDB object; small layer over api client
- last_ran - last time the program was ran.
- watch_signal - the signals should send events for every file on.
- """
- self.db = db
- self.watch_signal = getsig(watch_signal)
-
- def flush_all(self, last_ran):
- """
- bootstrap every single watched directory. only useful at startup note
- that because of the way list_directories works we also flush the import
- directory as well I think
- """
- for d in self.db.list_storable_paths(): self.flush_watch(d, last_ran)
-
- def flush_watch(self, directory, last_ran, all_files=False):
- """
- flush a single watch/imported directory. useful when wanting to to
- rescan, or add a watched/imported directory
- """
- songs = set([])
- added = modded = deleted = 0
- for f in mmp.walk_supported(directory, clean_empties=False):
- songs.add(f)
- # We decide whether to update a file's metadata by checking its
- # system modification date. If it's above the value self.last_ran
- # which is passed to us that means media monitor wasn't aware when
- # this changes occured in the filesystem hence it will send the
- # correct events to sync the database with the filesystem
- if os.path.getmtime(f) > last_ran:
- modded += 1
- dispatcher.send(signal=self.watch_signal, sender=self,
- event=ModifyFile(f))
- db_songs = set(( song for song in self.db.directory_get_files(directory,
- all_files)
- if mmp.sub_path(directory,song) ))
- # Get all the files that are in the database but in the file
- # system. These are the files marked for deletions
- for to_delete in db_songs.difference(songs):
- dispatcher.send(signal=self.watch_signal, sender=self,
- event=DeleteFile(to_delete))
- deleted += 1
- for to_add in songs.difference(db_songs):
- dispatcher.send(signal=self.watch_signal, sender=self,
- event=NewFile(to_add))
- added += 1
- self.logger.info( "Flushed watch directory (%s). \
- (added, modified, deleted) = (%d, %d, %d)"
- % (directory, added, modded, deleted) )
diff --git a/python_apps/media-monitor/mm2/media/monitor/config.py b/python_apps/media-monitor/mm2/media/monitor/config.py
deleted file mode 100644
index 4e6f955df..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/config.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-import copy
-from configobj import ConfigObj
-
-from exceptions import NoConfigFile, ConfigAccessViolation
-import pure as mmp
-
-class MMConfig(object):
- def __init__(self, path):
- if not os.path.exists(path): raise NoConfigFile(path)
- self.cfg = ConfigObj(path)
-
- def __getitem__(self, key):
- """ We always return a copy of the config item to prevent
- callers from doing any modifications through the returned
- objects methods """
- return copy.deepcopy(self.cfg[key])
-
- def __setitem__(self, key, value):
- """ We use this method not to allow anybody to mess around with
- config file any settings made should be done through MMConfig's
- instance methods """
- raise ConfigAccessViolation(key)
-
- def save(self): self.cfg.write()
-
- def last_ran(self):
- """ Returns the last time media monitor was ran by looking at
- the time when the file at 'index_path' was modified """
- return mmp.last_modified(self.cfg['media-monitor']['index_path'])
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/eventcontractor.py b/python_apps/media-monitor/mm2/media/monitor/eventcontractor.py
deleted file mode 100644
index bd0bd295d..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/eventcontractor.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from log import Loggable
-from events import DeleteFile
-
-class EventContractor(Loggable):
- def __init__(self):
- self.store = {}
- def event_registered(self, evt):
- """
- returns true if the event is registered which means that there is
- another "unpacked" event somewhere out there with the same path
- """
- return evt.path in self.store
-
- def get_old_event(self, evt):
- """
- get the previously registered event with the same path as 'evt'
- """
- return self.store[ evt.path ]
-
- def register(self, evt):
- if self.event_registered(evt):
- ev_proxy = self.get_old_event(evt)
- if ev_proxy.same_event(evt):
- ev_proxy.merge_proxy(evt)
- return False
- # delete overrides any other event
- elif evt.is_event(DeleteFile):
- ev_proxy.merge_proxy(evt)
- return False
- else:
- ev_proxy.run_hook()
- ev_proxy.reset_hook()
-
- self.store[ evt.path ] = evt
- evt.set_pack_hook( lambda : self.__unregister(evt) )
- return True
-
- def __unregister(self, evt):
- del self.store[evt.path]
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/eventdrainer.py b/python_apps/media-monitor/mm2/media/monitor/eventdrainer.py
deleted file mode 100644
index 85eb0d673..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/eventdrainer.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import socket
-import time
-from log import Loggable
-from toucher import RepeatTimer
-from amqplib.client_0_8.exceptions import AMQPConnectionException
-
-class EventDrainer(Loggable):
- """
- Flushes events from RabbitMQ that are sent from airtime every
- certain amount of time
- """
- def __init__(self, airtime_notifier, interval=1):
- def cb():
- try:
- message = airtime_notifier.simple_queue.get(block=True)
- airtime_notifier.handle_message(message.payload)
- message.ack()
- except (IOError, AttributeError, AMQPConnectionException), e:
- self.logger.error('Exception: %s', e)
- while not airtime_notifier.init_rabbit_mq():
- self.logger.error("Error connecting to RabbitMQ Server. \
- Trying again in few seconds")
- time.sleep(5)
-
- t = RepeatTimer(interval, cb)
- t.daemon = True
- t.start()
diff --git a/python_apps/media-monitor/mm2/media/monitor/events.py b/python_apps/media-monitor/mm2/media/monitor/events.py
deleted file mode 100644
index 52f0662d0..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/events.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-import abc
-import re
-import pure as mmp
-from pure import LazyProperty
-from metadata import Metadata
-from log import Loggable
-from exceptions import BadSongFile
-from ..saas.thread import getsig, user
-
-class PathChannel(object):
- """ Simple struct to hold a 'signal' string and a related 'path'.
- Basically used as a named tuple """
- def __init__(self, signal, path):
- self.signal = getsig(signal)
- self.path = path
-
-# TODO : Move this to it's file. Also possible unsingleton and use it as a
-# simple module just like m.m.owners
-class EventRegistry(object):
- """ This class's main use is to keep track all events with a cookie
- attribute. This is done mainly because some events must be 'morphed'
- into other events because we later detect that they are move events
- instead of delete events. """
- def __init__(self):
- self.registry = {}
- def register(self,evt): self.registry[evt.cookie] = evt
- def unregister(self,evt): del self.registry[evt.cookie]
- def registered(self,evt): return evt.cookie in self.registry
- def matching(self,evt):
- event = self.registry[evt.cookie]
- # Want to disallow accessing the same event twice
- self.unregister(event)
- return event
-
-class EventProxy(Loggable):
- """ A container object for instances of BaseEvent (or it's
- subclasses) used for event contractor """
- def __init__(self, orig_evt):
- self.orig_evt = orig_evt
- self.evt = orig_evt
- self.reset_hook()
- if hasattr(orig_evt, 'path'): self.path = orig_evt.path
-
- def set_pack_hook(self, l):
- self._pack_hook = l
-
- def reset_hook(self):
- self._pack_hook = lambda : None
-
- def run_hook(self):
- self._pack_hook()
-
- def safe_pack(self):
- self.run_hook()
- # make sure that cleanup hook is never called twice for the same event
- self.reset_hook()
- return self.evt.safe_pack()
-
- def merge_proxy(self, proxy):
- self.evt = proxy.evt
-
- def is_event(self, real_event):
- return isinstance(self.evt, real_event)
-
- def same_event(self, proxy):
- return self.evt.__class__ == proxy.evt.__class__
-
-
-class HasMetaData(object):
- """ Any class that inherits from this class gains the metadata
- attribute that loads metadata from the class's 'path' attribute.
- This is done lazily so there is no performance penalty to inheriting
- from this and subsequent calls to metadata are cached """
- __metaclass__ = abc.ABCMeta
- @LazyProperty
- def metadata(self): return Metadata(self.path)
-
-class BaseEvent(Loggable):
- __metaclass__ = abc.ABCMeta
- def __init__(self, raw_event):
- # TODO : clean up this idiotic hack
- # we should use keyword constructors instead of this behaviour checking
- # bs to initialize BaseEvent
- if hasattr(raw_event,"pathname"):
- self._raw_event = raw_event
- self.path = os.path.normpath(raw_event.pathname)
- else: self.path = raw_event
- self.owner = user().owner.get_owner(self.path)
- owner_re = re.search('stor/imported/(?P\d+)/', self.path)
- if owner_re:
- self.logger.info("matched path: %s" % self.path)
- self.owner = owner_re.group('owner')
- else:
- self.logger.info("did not match path: %s" % self.path)
- self._pack_hook = lambda: None # no op
- # into another event
-
- # TODO : delete this method later
- def reset_hook(self):
- """ Resets the hook that is called after an event is packed.
- Before resetting the hook we execute it to make sure that
- whatever cleanup operations were queued are executed. """
- self._pack_hook()
- self._pack_hook = lambda: None
-
- def exists(self): return os.path.exists(self.path)
-
- @LazyProperty
- def cookie(self): return getattr( self._raw_event, 'cookie', None )
-
- def __str__(self):
- return "Event(%s). Path(%s)" % ( self.path, self.__class__.__name__)
-
- # TODO : delete this method later
- def add_safe_pack_hook(self,k):
- """ adds a callable object (function) that will be called after
- the event has been "safe_packed" """
- self._pack_hook = k
-
- def proxify(self):
- return EventProxy(self)
-
- # As opposed to unsafe_pack...
- def safe_pack(self):
- """ returns exceptions instead of throwing them to be consistent
- with events that must catch their own BadSongFile exceptions
- since generate a set of exceptions instead of a single one """
- try:
- self._pack_hook()
- ret = self.pack()
- # Remove owner of this file only after packing. Otherwise packing
- # will not serialize the owner correctly into the airtime request
- user().owner.remove_file_owner(self.path)
- return ret
- except BadSongFile as e: return [e]
- except Exception as e:
- self.unexpected_exception(e)
- return[e]
-
- # nothing to see here, please move along
- def morph_into(self, evt):
- self.logger.info("Morphing %s into %s" % ( str(self), str(evt) ) )
- self._raw_event = evt._raw_event
- self.path = evt.path
- self.__class__ = evt.__class__
- # Clean up old hook and transfer the new events hook
- self.reset_hook()
- self.add_safe_pack_hook( evt._pack_hook )
- return self
-
- def assign_owner(self,req):
- """ Packs self.owner to req if the owner is valid. I.e. it's not
- -1. This method is used by various events that would like to
- pass owner as a parameter. NewFile for example. """
- if self.owner != -1: req['MDATA_KEY_OWNER_ID'] = self.owner
-
-class FakePyinotify(object):
- """ sometimes we must create our own pyinotify like objects to
- instantiate objects from the classes below whenever we want to turn
- a single event into multiple events """
- def __init__(self, path): self.pathname = path
-
-class OrganizeFile(BaseEvent, HasMetaData):
- """ The only kind of event that does support the pack protocol. It's
- used internally with mediamonitor to move files in the organize
- directory. """
- def __init__(self, *args, **kwargs):
- super(OrganizeFile, self).__init__(*args, **kwargs)
- def pack(self):
- raise AttributeError("You can't send organize events to airtime!!!")
-
-class NewFile(BaseEvent, HasMetaData):
- """ NewFile events are the only events that contain
- MDATA_KEY_OWNER_ID metadata in them. """
- def __init__(self, *args, **kwargs):
- super(NewFile, self).__init__(*args, **kwargs)
- def pack(self):
- """ packs turns an event into a media monitor request """
- req_dict = self.metadata.extract()
- req_dict['mode'] = u'create'
- req_dict['is_record'] = self.metadata.is_recorded()
- self.assign_owner(req_dict)
- req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
- return [req_dict]
-
-class DeleteFile(BaseEvent):
- """ DeleteFile event only contains the path to be deleted. No other
- metadata can be or is included. (This is because this event is fired
- after the deletion occurs). """
- def __init__(self, *args, **kwargs):
- super(DeleteFile, self).__init__(*args, **kwargs)
- def pack(self):
- req_dict = {}
- req_dict['mode'] = u'delete'
- req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
- return [req_dict]
-
-class MoveFile(BaseEvent, HasMetaData):
- """ Path argument should be the new path of the file that was moved """
- def __init__(self, *args, **kwargs):
- super(MoveFile, self).__init__(*args, **kwargs)
- def old_path(self):
- return self._raw_event.src_pathname
- def pack(self):
- req_dict = {}
- req_dict['mode'] = u'moved'
- req_dict['MDATA_KEY_ORIGINAL_PATH'] = self.old_path()
- req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
- req_dict['MDATA_KEY_MD5'] = self.metadata.extract()['MDATA_KEY_MD5']
- return [req_dict]
-
-class ModifyFile(BaseEvent, HasMetaData):
- def __init__(self, *args, **kwargs):
- super(ModifyFile, self).__init__(*args, **kwargs)
- def pack(self):
- req_dict = self.metadata.extract()
- req_dict['mode'] = u'modify'
- # path to directory that is to be removed
- req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path )
- return [req_dict]
-
-def map_events(directory, constructor):
- """ Walks 'directory' and creates an event using 'constructor'.
- Returns a list of the constructed events. """
- # -unknown-path should not appear in the path here but more testing
- # might be necessary
- for f in mmp.walk_supported(directory, clean_empties=False):
- try:
- for e in constructor( FakePyinotify(f) ).pack(): yield e
- except BadSongFile as e: yield e
-
-class DeleteDir(BaseEvent):
- """ A DeleteDir event unfolds itself into a list of DeleteFile
- events for every file in the directory. """
- def __init__(self, *args, **kwargs):
- super(DeleteDir, self).__init__(*args, **kwargs)
- def pack(self):
- return map_events( self.path, DeleteFile )
-
-class MoveDir(BaseEvent):
- """ A MoveDir event unfolds itself into a list of MoveFile events
- for every file in the directory. """
- def __init__(self, *args, **kwargs):
- super(MoveDir, self).__init__(*args, **kwargs)
- def pack(self):
- return map_events( self.path, MoveFile )
-
-class DeleteDirWatch(BaseEvent):
- """ Deleting a watched directory is different from deleting any
- other directory. Hence we must have a separate event to handle this
- case """
- def __init__(self, *args, **kwargs):
- super(DeleteDirWatch, self).__init__(*args, **kwargs)
- def pack(self):
- req_dict = {}
- req_dict['mode'] = u'delete_dir'
- req_dict['MDATA_KEY_FILEPATH'] = unicode( self.path + "/" )
- return [req_dict]
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/exceptions.py b/python_apps/media-monitor/mm2/media/monitor/exceptions.py
deleted file mode 100644
index f7d022fb1..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/exceptions.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-class BadSongFile(Exception):
- def __init__(self, path): self.path = path
- def __str__(self): return "Can't read %s" % self.path
-
-class NoConfigFile(Exception):
- def __init__(self, path): self.path = path
- def __str__(self):
- return "Path '%s' for config file does not exit" % self.path
-
-class ConfigAccessViolation(Exception):
- def __init__(self,key): self.key = key
- def __str__(self): return "You must not access key '%s' directly" % self.key
-
-class FailedToSetLocale(Exception):
- def __str__(self): return "Failed to set locale"
-
-class FailedToObtainLocale(Exception):
- def __init__(self, path, cause):
- self.path = path
- self.cause = cause
- def __str__(self): return "Failed to obtain locale from '%s'" % self.path
-
-class CouldNotCreateIndexFile(Exception):
- """exception whenever index file cannot be created"""
- def __init__(self, path, cause=None):
- self.path = path
- self.cause = cause
- def __str__(self): return "Failed to create touch file '%s'" % self.path
-
-class DirectoryIsNotListed(Exception):
- def __init__(self,dir_id,cause=None):
- self.dir_id = dir_id
- self.cause = cause
- def __str__(self):
- return "%d was not listed as a directory in the database" % self.dir_id
-
-class FailedToCreateDir(Exception):
- def __init__(self,path, parent):
- self.path = path
- self.parent = parent
- def __str__(self): return "Failed to create path '%s'" % self.path
-
-class NoDirectoryInAirtime(Exception):
- def __init__(self,path, does_exist):
- self.path = path
- self.does_exist = does_exist
- def __str__(self):
- return "Directory '%s' does not exist in Airtime.\n \
- However: %s do exist." % (self.path, self.does_exist)
-
-class InvalidMetadataElement(Exception):
- def __init__(self, parent, key, path):
- self.parent = parent
- self.key = key
- self.path = path
- def __str__(self):
- return "InvalidMetadataElement: (key,path) = (%s,%s)" \
- % (self.key, self.path)
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/handler.py b/python_apps/media-monitor/mm2/media/monitor/handler.py
deleted file mode 100644
index 7e00bfde9..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/handler.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-from pydispatch import dispatcher
-import abc
-
-from log import Loggable
-from ..saas.thread import getsig
-import pure as mmp
-
-# Defines the handle interface
-class Handles(object):
- __metaclass__ = abc.ABCMeta
- @abc.abstractmethod
- def handle(self, sender, event, *args, **kwargs): pass
-
-# TODO : Investigate whether weak reffing in dispatcher.connect could possibly
-# cause a memory leak
-
-class ReportHandler(Handles):
- """
- A handler that can also report problem files when things go wrong
- through the report_problem_file routine
- """
- __metaclass__ = abc.ABCMeta
- def __init__(self, signal, weak=False):
- self.signal = getsig(signal)
- self.report_signal = getsig("badfile")
- def dummy(sender, event): self.handle(sender,event)
- dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
- weak=weak)
-
- def report_problem_file(self, event, exception=None):
- dispatcher.send(signal=self.report_signal, sender=self, event=event,
- exception=exception)
-
-class ProblemFileHandler(Handles, Loggable):
- """
- Responsible for answering to events passed through the 'badfile'
- signal. Moves the problem file passed to the designated directory.
- """
- def __init__(self, channel, **kwargs):
- self.channel = channel
- self.signal = getsig(self.channel.signal)
- self.problem_dir = self.channel.path
- def dummy(sender, event, exception):
- self.handle(sender, event, exception)
- dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any,
- weak=False)
- mmp.create_dir( self.problem_dir )
- self.logger.info("Initialized problem file handler. Problem dir: '%s'" %
- self.problem_dir)
-
- def handle(self, sender, event, exception=None):
- # TODO : use the exception parameter for something
- self.logger.info("Received problem file: '%s'. Supposed to move it to \
- problem dir", event.path)
- try: mmp.move_to_dir(dir_path=self.problem_dir, file_path=event.path)
- except Exception as e:
- self.logger.info("Could not move file: '%s' to problem dir: '%s'" %
- (event.path, self.problem_dir))
- self.logger.info("Exception: %s" % str(e))
diff --git a/python_apps/media-monitor/mm2/media/monitor/listeners.py b/python_apps/media-monitor/mm2/media/monitor/listeners.py
deleted file mode 100644
index 0437a7964..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/listeners.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-import pyinotify
-from pydispatch import dispatcher
-from functools import wraps
-
-import pure as mmp
-from pure import IncludeOnly
-from events import OrganizeFile, NewFile, MoveFile, DeleteFile, \
- DeleteDir, MoveDir,\
- DeleteDirWatch
-from log import Loggable
-from ..saas.thread import getsig, user
-# Note: Because of the way classes that inherit from pyinotify.ProcessEvent
-# interact with constructors. you should only instantiate objects from them
-# using keyword arguments. For example:
-# OrganizeListener('watch_signal') <= wrong
-# OrganizeListener(signal='watch_signal') <= right
-
-class FileMediator(Loggable):
- # TODO : this class is not actually used. remove all references to it
- # everywhere (including tests).
- """ FileMediator is used an intermediate mechanism that filters out
- certain events. """
- def __init__(self) : self.ignored_set = set([]) # for paths only
- def is_ignored(self,path) : return path in self.ignored_set
- def ignore(self, path) : self.ignored_set.add(path)
- def unignore(self, path) : self.ignored_set.remove(path)
-
-def mediate_ignored(fn):
- @wraps(fn)
- def wrapped(self, event, *args,**kwargs):
- event.pathname = unicode(event.pathname, "utf-8")
- if user().file_mediator.is_ignored(event.pathname):
- user().file_mediator.logger.info("Ignoring: '%s' (once)" % event.pathname)
- user().file_mediator.unignore(event.pathname)
- else: return fn(self, event, *args, **kwargs)
- return wrapped
-
-class BaseListener(object):
- def __str__(self):
- return "Listener(%s), Signal(%s)" % \
- (self.__class__.__name__, self. signal)
- def my_init(self, signal): self.signal = getsig(signal)
-
-class OrganizeListener(BaseListener, pyinotify.ProcessEvent, Loggable):
- def process_IN_CLOSE_WRITE(self, event):
- #self.logger.info("===> handling: '%s'" % str(event))
- self.process_to_organize(event)
-
- def process_IN_MOVED_TO(self, event):
- #self.logger.info("===> handling: '%s'" % str(event))
- self.process_to_organize(event)
-
- def flush_events(self, path):
- """
- organize the whole directory at path. (pretty much by doing what
- handle does to every file
- """
- flushed = 0
- for f in mmp.walk_supported(path, clean_empties=True):
- self.logger.info("Bootstrapping: File in 'organize' directory: \
- '%s'" % f)
- if not mmp.file_locked(f):
- dispatcher.send(signal=getsig(self.signal), sender=self,
- event=OrganizeFile(f))
- flushed += 1
- #self.logger.info("Flushed organized directory with %d files" % flushed)
-
- @IncludeOnly(mmp.supported_extensions)
- def process_to_organize(self, event):
- dispatcher.send(signal=getsig(self.signal), sender=self,
- event=OrganizeFile(event))
-
-class StoreWatchListener(BaseListener, Loggable, pyinotify.ProcessEvent):
- def process_IN_CLOSE_WRITE(self, event):
- self.process_create(event)
- def process_IN_MOVED_TO(self, event):
- if user().event_registry.registered(event):
- # We need this trick because we don't how to "expand" dir events
- # into file events until we know for sure if we deleted or moved
- morph = MoveDir(event) if event.dir else MoveFile(event)
- user().event_registry.matching(event).morph_into(morph)
- else: self.process_create(event)
- def process_IN_MOVED_FROM(self, event):
- # Is either delete dir or delete file
- evt = self.process_delete(event)
- # evt can be none whenever event points that a file that would be
- # ignored by @IncludeOnly
- if hasattr(event,'cookie') and (evt != None):
- user().event_registry.register(evt)
- def process_IN_DELETE(self,event): self.process_delete(event)
- def process_IN_MOVE_SELF(self, event):
- if '-unknown-path' in event.pathname:
- event.pathname = event.pathname.replace('-unknown-path','')
- self.delete_watch_dir(event)
-
- def delete_watch_dir(self, event):
- e = DeleteDirWatch(event)
- dispatcher.send(signal=getsig('watch_move'), sender=self, event=e)
- dispatcher.send(signal=getsig(self.signal), sender=self, event=e)
-
- @mediate_ignored
- @IncludeOnly(mmp.supported_extensions)
- def process_create(self, event):
- evt = NewFile(event)
- dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
- return evt
-
- @mediate_ignored
- @IncludeOnly(mmp.supported_extensions)
- def process_delete(self, event):
- evt = None
- if event.dir : evt = DeleteDir(event)
- else : evt = DeleteFile(event)
- dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
- return evt
-
- @mediate_ignored
- def process_delete_dir(self, event):
- evt = DeleteDir(event)
- dispatcher.send(signal=getsig(self.signal), sender=self, event=evt)
- return evt
-
- def flush_events(self, path):
- """
- walk over path and send a NewFile event for every file in this
- directory. Not to be confused with bootstrapping which is a more
- careful process that involved figuring out what's in the database
- first.
- """
- # Songs is a dictionary where every key is the watched the directory
- # and the value is a set with all the files in that directory.
- added = 0
- for f in mmp.walk_supported(path, clean_empties=False):
- added += 1
- dispatcher.send( signal=getsig(self.signal), sender=self, event=NewFile(f) )
- self.logger.info( "Flushed watch directory. added = %d" % added )
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/log.py b/python_apps/media-monitor/mm2/media/monitor/log.py
deleted file mode 100644
index ec9523d7f..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/log.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import logging
-import abc
-import traceback
-from pure import LazyProperty
-
-appname = 'root'
-
-def setup_logging(log_path):
- """ Setup logging by writing log to 'log_path' """
- #logger = logging.getLogger(appname)
- logging.basicConfig(filename=log_path, level=logging.DEBUG)
-
-def get_logger():
- """ in case we want to use the common logger from a procedural
- interface """
- return logging.getLogger()
-
-class Loggable(object):
- """ Any class that wants to log can inherit from this class and
- automatically get a logger attribute that can be used like:
- self.logger.info(...) etc. """
- __metaclass__ = abc.ABCMeta
- @LazyProperty
- def logger(self): return get_logger()
-
- def unexpected_exception(self,e):
- """ Default message for 'unexpected' exceptions """
- self.fatal_exception("'Unexpected' exception has occured:", e)
-
- def fatal_exception(self, message, e):
- """ Prints an exception 'e' with 'message'. Also outputs the
- traceback. """
- self.logger.error( message )
- self.logger.error( str(e) )
- self.logger.error( traceback.format_exc() )
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/manager.py b/python_apps/media-monitor/mm2/media/monitor/manager.py
deleted file mode 100644
index c457b39b7..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/manager.py
+++ /dev/null
@@ -1,236 +0,0 @@
-import pyinotify
-import time
-import os
-from pydispatch import dispatcher
-
-from os.path import normpath
-from events import PathChannel
-from log import Loggable
-from listeners import StoreWatchListener, OrganizeListener
-from handler import ProblemFileHandler
-from organizer import Organizer
-from ..saas.thread import InstanceInheritingThread, getsig
-import pure as mmp
-
-
-class ManagerTimeout(InstanceInheritingThread,Loggable):
- """ The purpose of this class is to flush the organize directory
- every 3 secnods. This used to be just a work around for cc-4235
- but recently became a permanent solution because it's "cheap" and
- reliable """
- def __init__(self, manager, interval=1.5):
- # TODO : interval should be read from config and passed here instead
- # of just using the hard coded value
- super(ManagerTimeout, self).__init__()
- self.manager = manager
- self.interval = interval
- def run(self):
- while True:
- time.sleep(self.interval)
- self.manager.flush_organize()
-
-class Manager(Loggable):
- # NOTE : this massive class is a source of many problems of mm and
- # is in dire need of breaking up and refactoring.
- """ An abstraction over media monitors core pyinotify functions.
- These include adding watched,store, organize directories, etc.
- Basically composes over WatchManager from pyinotify """
- def __init__(self):
- self.wm = pyinotify.WatchManager()
- # These two instance variables are assumed to be constant
- self.watch_channel = getsig('watch')
- self.organize_channel = getsig('organize')
- self.watch_listener = StoreWatchListener(signal = self.watch_channel)
- self.__timeout_thread = ManagerTimeout(self)
- self.__timeout_thread.daemon = True
- self.__timeout_thread.start()
- self.organize = {
- 'organize_path' : None,
- 'imported_path' : None,
- 'recorded_path' : None,
- 'problem_files_path' : None,
- 'organizer' : None,
- 'problem_handler' : None,
- 'organize_listener' : OrganizeListener(signal=
- self.organize_channel),
- }
- def dummy(sender, event): self.watch_move( event.path, sender=sender )
- dispatcher.connect(dummy, signal=getsig('watch_move'),
- sender=dispatcher.Any, weak=False)
- def subwatch_add(sender, directory):
- self.__add_watch(directory, self.watch_listener)
- dispatcher.connect(subwatch_add, signal=getsig('add_subwatch'),
- sender=dispatcher.Any, weak=False)
- # A private mapping path => watch_descriptor
- # we use the same dictionary for organize, watch, store wd events.
- # this is a little hacky because we are unable to have multiple wd's
- # on the same path.
- self.__wd_path = {}
- # The following set isn't really necessary anymore. Should be
- # removed...
- self.watched_directories = set([])
-
- # This is the only event that we are unable to process "normally". I.e.
- # through dedicated handler objects. Because we must have access to a
- # manager instance. Hence we must slightly break encapsulation.
- def watch_move(self, watch_dir, sender=None):
- """ handle 'watch move' events directly sent from listener """
- self.logger.info("Watch dir '%s' has been renamed (hence removed)" %
- watch_dir)
- self.remove_watch_directory(normpath(watch_dir))
-
- def watch_signal(self):
- """ Return the signal string our watch_listener is reading
- events from """
- return getsig(self.watch_listener.signal)
-
- def __remove_watch(self,path):
- """ Remove path from being watched (first will check if 'path'
- is watched) """
- # only delete if dir is actually being watched
- if path in self.__wd_path:
- wd = self.__wd_path[path]
- self.wm.rm_watch(wd, rec=True)
- del(self.__wd_path[path])
-
- def __add_watch(self,path,listener):
- """ Start watching 'path' using 'listener'. First will check if
- directory is being watched before adding another watch """
-
- self.logger.info("Attempting to add listener to path '%s'" % path)
- self.logger.info( 'Listener: %s' % str(listener) )
-
- if not self.has_watch(path):
- wd = self.wm.add_watch(path, pyinotify.ALL_EVENTS, rec=True,
- auto_add=True, proc_fun=listener)
- if wd: self.__wd_path[path] = wd.values()[0]
-
- def __create_organizer(self, target_path, recorded_path):
- """ creates an organizer at new destination path or modifies the
- old one """
- # TODO : find a proper fix for the following hack
- # We avoid creating new instances of organize because of the way
- # it interacts with pydispatch. We must be careful to never have
- # more than one instance of OrganizeListener but this is not so
- # easy. (The singleton hack in Organizer) doesn't work. This is
- # the only thing that seems to work.
- if self.organize['organizer']:
- o = self.organize['organizer']
- o.channel = self.organize_channel
- o.target_path = target_path
- o.recorded_path = recorded_path
- else:
- self.organize['organizer'] = Organizer(channel=
- self.organize_channel, target_path=target_path,
- recorded_path=recorded_path)
-
- def get_problem_files_path(self):
- """ returns the path where problem files should go """
- return self.organize['problem_files_path']
-
- def set_problem_files_path(self, new_path):
- """ Set the path where problem files should go """
- self.organize['problem_files_path'] = new_path
- self.organize['problem_handler'] = \
- ProblemFileHandler( PathChannel(signal=getsig('badfile'),
- path=new_path) )
-
- def get_recorded_path(self):
- """ returns the path of the recorded directory """
- return self.organize['recorded_path']
-
- def set_recorded_path(self, new_path):
- self.__remove_watch(self.organize['recorded_path'])
- self.organize['recorded_path'] = new_path
- self.__create_organizer( self.organize['imported_path'], new_path)
- self.__add_watch(new_path, self.watch_listener)
-
- def get_organize_path(self):
- """ returns the current path that is being watched for
- organization """
- return self.organize['organize_path']
-
- def set_organize_path(self, new_path):
- """ sets the organize path to be new_path. Under the current
- scheme there is only one organize path but there is no reason
- why more cannot be supported """
- # if we are already organizing a particular directory we remove the
- # watch from it first before organizing another directory
- self.__remove_watch(self.organize['organize_path'])
- self.organize['organize_path'] = new_path
- # the OrganizeListener instance will walk path and dispatch an organize
- # event for every file in that directory
- self.organize['organize_listener'].flush_events(new_path)
- #self.__add_watch(new_path, self.organize['organize_listener'])
-
- def flush_organize(self):
- path = self.organize['organize_path']
- self.organize['organize_listener'].flush_events(path)
-
- def get_imported_path(self):
- return self.organize['imported_path']
-
- def set_imported_path(self,new_path):
- """ set the directory where organized files go to. """
- self.__remove_watch(self.organize['imported_path'])
- self.organize['imported_path'] = new_path
- self.__create_organizer( new_path, self.organize['recorded_path'])
- self.__add_watch(new_path, self.watch_listener)
-
- def change_storage_root(self, store):
- """ hooks up all the directories for you. Problem, recorded,
- imported, organize. """
- store_paths = mmp.expand_storage(store)
- # First attempt to make sure that all paths exist before adding any
- # watches
- for path_type, path in store_paths.iteritems():
- try: mmp.create_dir(path)
- except mmp.FailedToCreateDir as e: self.unexpected_exception(e)
-
- os.chmod(store_paths['organize'], 0775)
-
- self.set_problem_files_path(store_paths['problem_files'])
- self.set_imported_path(store_paths['imported'])
- self.set_recorded_path(store_paths['recorded'])
- self.set_organize_path(store_paths['organize'])
-
- def has_watch(self, path):
- """ returns true if the path is being watched or not. Any kind
- of watch: organize, store, watched. """
- return path in self.__wd_path
-
- def add_watch_directory(self, new_dir):
- """ adds a directory to be "watched". "watched" directories are
- those that are being monitored by media monitor for airtime in
- this context and not directories pyinotify calls watched """
- if self.has_watch(new_dir):
- self.logger.info("Cannot add '%s' to watched directories. It's \
- already being watched" % new_dir)
- else:
- self.logger.info("Adding watched directory: '%s'" % new_dir)
- self.__add_watch(new_dir, self.watch_listener)
-
- def remove_watch_directory(self, watch_dir):
- """ removes a directory from being "watched". Undoes
- add_watch_directory """
- if self.has_watch(watch_dir):
- self.logger.info("Removing watched directory: '%s'", watch_dir)
- self.__remove_watch(watch_dir)
- else:
- self.logger.info("'%s' is not being watched, hence cannot be \
- removed" % watch_dir)
- self.logger.info("The directories we are watching now are:")
- self.logger.info( self.__wd_path )
-
- def loop(self):
- """ block until we receive pyinotify events """
- notifier = pyinotify.Notifier(self.wm)
- notifier.coalesce_events()
- notifier.loop()
- #notifier = pyinotify.ThreadedNotifier(self.wm, read_freq=1)
- #notifier.coalesce_events()
- #notifier.start()
- #return notifier
- #import asyncore
- #notifier = pyinotify.AsyncNotifier(self.wm)
- #asyncore.loop()
diff --git a/python_apps/media-monitor/mm2/media/monitor/metadata.py b/python_apps/media-monitor/mm2/media/monitor/metadata.py
deleted file mode 100644
index 2c0988257..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/metadata.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# -*- coding: utf-8 -*-
-import mutagen
-import os
-import copy
-from mutagen.easymp4 import EasyMP4KeyError
-from mutagen.easyid3 import EasyID3KeyError
-
-from exceptions import BadSongFile, InvalidMetadataElement
-from log import Loggable
-from pure import format_length
-import pure as mmp
-
-# emf related stuff
-from ..metadata.process import global_reader
-from ..metadata import definitions as defs
-defs.load_definitions()
-
-"""
-list of supported easy tags in mutagen version 1.20
-['albumartistsort', 'musicbrainz_albumstatus', 'lyricist', 'releasecountry',
-'date', 'performer', 'musicbrainz_albumartistid', 'composer', 'encodedby',
-'tracknumber', 'musicbrainz_albumid', 'album', 'asin', 'musicbrainz_artistid',
-'mood', 'copyright', 'author', 'media', 'length', 'version', 'artistsort',
-'titlesort', 'discsubtitle', 'website', 'musicip_fingerprint', 'conductor',
-'compilation', 'barcode', 'performer:*', 'composersort', 'musicbrainz_discid',
-'musicbrainz_albumtype', 'genre', 'isrc', 'discnumber', 'musicbrainz_trmid',
-'replaygain_*_gain', 'musicip_puid', 'artist', 'title', 'bpm',
-'musicbrainz_trackid', 'arranger', 'albumsort', 'replaygain_*_peak',
-'organization']
-"""
-
-airtime2mutagen = {
- "MDATA_KEY_TITLE" : "title",
- "MDATA_KEY_CREATOR" : "artist",
- "MDATA_KEY_SOURCE" : "album",
- "MDATA_KEY_GENRE" : "genre",
- "MDATA_KEY_MOOD" : "mood",
- "MDATA_KEY_TRACKNUMBER" : "tracknumber",
- "MDATA_KEY_BPM" : "bpm",
- "MDATA_KEY_LABEL" : "label",
- "MDATA_KEY_COMPOSER" : "composer",
- "MDATA_KEY_ENCODER" : "encodedby",
- "MDATA_KEY_CONDUCTOR" : "conductor",
- "MDATA_KEY_YEAR" : "date",
- "MDATA_KEY_URL" : "website",
- "MDATA_KEY_ISRC" : "isrc",
- "MDATA_KEY_COPYRIGHT" : "copyright",
- "MDATA_KEY_CUE_IN" : "cuein",
- "MDATA_KEY_CUE_OUT" : "cueout",
-}
-
-#doesn't make sense for us to write these values to a track's metadata
-mutagen_do_not_write = ["MDATA_KEY_CUE_IN", "MDATA_KEY_CUE_OUT"]
-
-# Some airtime attributes are special because they must use the mutagen object
-# itself to calculate the value that they need. The lambda associated with each
-# key should attempt to extract the corresponding value from the mutagen object
-# itself pass as 'm'. In the case when nothing can be extracted the lambda
-# should return some default value to be assigned anyway or None so that the
-# airtime metadata object will skip the attribute outright.
-
-airtime_special = {
- "MDATA_KEY_DURATION" :
- lambda m: format_length(getattr(m.info, u'length', 0.0)),
- "MDATA_KEY_BITRATE" :
- lambda m: getattr(m.info, "bitrate", ''),
- "MDATA_KEY_SAMPLERATE" :
- lambda m: getattr(m.info, u'sample_rate', 0),
- "MDATA_KEY_MIME" :
- lambda m: m.mime[0] if len(m.mime) > 0 else u'',
-}
-mutagen2airtime = dict( (v,k) for k,v in airtime2mutagen.iteritems()
- if isinstance(v, str) )
-
-truncate_table = {
- 'MDATA_KEY_GENRE' : 64,
- 'MDATA_KEY_TITLE' : 512,
- 'MDATA_KEY_CREATOR' : 512,
- 'MDATA_KEY_SOURCE' : 512,
- 'MDATA_KEY_MOOD' : 64,
- 'MDATA_KEY_LABEL' : 512,
- 'MDATA_KEY_COMPOSER' : 512,
- 'MDATA_KEY_ENCODER' : 255,
- 'MDATA_KEY_CONDUCTOR' : 512,
- 'MDATA_KEY_YEAR' : 16,
- 'MDATA_KEY_URL' : 512,
- 'MDATA_KEY_ISRC' : 512,
- 'MDATA_KEY_COPYRIGHT' : 512,
-}
-
-class Metadata(Loggable):
- # TODO : refactor the way metadata is being handled. Right now things are a
- # little bit messy. Some of the handling is in m.m.pure while the rest is
- # here. Also interface is not very consistent
-
- @staticmethod
- def fix_title(path):
- # If we have no title in path we will format it
- # TODO : this is very hacky so make sure to fix it
- m = mutagen.File(path, easy=True)
- if u'title' not in m:
- new_title = unicode( mmp.no_extension_basename(path) )
- m[u'title'] = new_title
- m.save()
-
- @staticmethod
- def write_unsafe(path,md):
- """
- Writes 'md' metadata into 'path' through mutagen. Converts all
- dictionary values to strings because mutagen will not write anything
- else
- """
- if not os.path.exists(path): raise BadSongFile(path)
- song_file = mutagen.File(path, easy=True)
- exceptions = [] # for bad keys
- for airtime_k, airtime_v in md.iteritems():
- if airtime_k in airtime2mutagen and \
- airtime_k not in mutagen_do_not_write:
- # The unicode cast here is mostly for integers that need to be
- # strings
- if airtime_v is None: continue
- try:
- song_file[ airtime2mutagen[airtime_k] ] = unicode(airtime_v)
- except (EasyMP4KeyError, EasyID3KeyError) as e:
- exceptions.append(InvalidMetadataElement(e, airtime_k,
- path))
- song_file.save()
- # bubble dem up so that user knows that something is wrong
- for e in exceptions: raise e
-
- def __init__(self, fpath):
- # Forcing the unicode through
- try : fpath = fpath.decode("utf-8")
- except : pass
- self.__metadata = global_reader.read_mutagen(fpath)
-
- def is_recorded(self):
- """
- returns true if the file has been created by airtime through recording
- """
- return mmp.is_airtime_recorded( self.__metadata )
-
- def extract(self):
- """
- returns a copy of the metadata that was loaded when object was
- constructed
- """
- return copy.deepcopy(self.__metadata)
-
- def utf8(self):
- """
- Returns a unicode aware representation of the data that is compatible
- with what is spent to airtime
- """
- return mmp.convert_dict_value_to_utf8(self.extract())
diff --git a/python_apps/media-monitor/mm2/media/monitor/organizer.py b/python_apps/media-monitor/mm2/media/monitor/organizer.py
deleted file mode 100644
index c4550bdfb..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/organizer.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-import pure as mmp
-from handler import ReportHandler
-from log import Loggable
-from exceptions import BadSongFile
-from events import OrganizeFile
-from pydispatch import dispatcher
-from os.path import dirname
-from ..saas.thread import getsig, user
-import os.path
-
-class Organizer(ReportHandler,Loggable):
- """ Organizer is responsible to to listening to OrganizeListener
- events and committing the appropriate changes to the filesystem.
- It does not in any interact with WatchSyncer's even when the the
- WatchSyncer is a "storage directory". The "storage" directory picks
- up all of its events through pyinotify. (These events are fed to it
- through StoreWatchListener) """
-
- # Commented out making this class a singleton because it's just a band aid
- # for the real issue. The real issue being making multiple Organizer
- # instances with pydispatch
-
- #_instance = None
- #def __new__(cls, channel, target_path, recorded_path):
- #if cls._instance:
- #cls._instance.channel = channel
- #cls._instance.target_path = target_path
- #cls._instance.recorded_path = recorded_path
- #else:
- #cls._instance = super(Organizer, cls).__new__( cls, channel,
- #target_path, recorded_path)
- #return cls._instance
-
- def __init__(self, channel, target_path, recorded_path):
- self.channel = channel
- self.target_path = target_path
- self.recorded_path = recorded_path
- super(Organizer, self).__init__(signal=getsig(self.channel), weak=False)
-
- def handle(self, sender, event):
- """ Intercept events where a new file has been added to the
- organize directory and place it in the correct path (starting
- with self.target_path) """
- # Only handle this event type
- assert isinstance(event, OrganizeFile), \
- "Organizer can only handle OrganizeFile events.Given '%s'" % event
- try:
- # We must select the target_path based on whether file was recorded
- # by airtime or not.
- # Do we need to "massage" the path using mmp.organized_path?
- target_path = self.recorded_path if event.metadata.is_recorded() \
- else self.target_path
- # nasty hack do this properly
- owner_id = mmp.owner_id(event.path)
- if owner_id != -1:
- target_path = os.path.join(target_path, unicode(owner_id))
-
- mdata = event.metadata.extract()
- new_path = mmp.organized_path(event.path, target_path, mdata)
-
- # See hack in mmp.magic_move
- def new_dir_watch(d):
- # TODO : rewrite as return lambda : dispatcher.send(...
- def cb():
- dispatcher.send(signal=getsig("add_subwatch"), sender=self,
- directory=d)
- return cb
-
- mmp.magic_move(event.path, new_path,
- after_dir_make=new_dir_watch(dirname(new_path)))
-
- # The reason we need to go around saving the owner in this
- # backwards way is because we are unable to encode the owner id
- # into the file itself so that the StoreWatchListener listener can
- # detect it from the file
- user().owner.add_file_owner(new_path, owner_id )
-
- self.logger.info('Organized: "%s" into "%s"' %
- (event.path, new_path))
- except BadSongFile as e:
- self.report_problem_file(event=event, exception=e)
- # probably general error in mmp.magic.move...
- except Exception as e:
- self.unexpected_exception( e )
- self.report_problem_file(event=event, exception=e)
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/owners.py b/python_apps/media-monitor/mm2/media/monitor/owners.py
deleted file mode 100644
index 5fc3ad831..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/owners.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-from log import Loggable
-
-class Owner(Loggable):
- def __init__(self):
- # hash: 'filepath' => owner_id
- self.owners = {}
-
- def get_owner(self,f):
- """ Get the owner id of the file 'f' """
- o = self.owners[f] if f in self.owners else -1
- self.logger.info("Received owner for %s. Owner: %s" % (f, o))
- return o
-
-
- def add_file_owner(self,f,owner):
- """ Associate file f with owner. If owner is -1 then do we will not record
- it because -1 means there is no owner. Returns True if f is being stored
- after the function. False otherwise. """
- if owner == -1: return False
- if f in self.owners:
- if owner != self.owners[f]: # check for fishiness
- self.logger.info("Warning ownership of file '%s' changed from '%d' to '%d'"
- % (f, self.owners[f], owner))
- else: return True
- self.owners[f] = owner
- return True
-
- def has_owner(self,f):
- """ True if f is owned by somebody. False otherwise. """
- return f in self.owners
-
- def remove_file_owner(self,f):
- """ Try and delete any association made with file f. Returns true if
- the the association was actually deleted. False otherwise. """
- if f in self.owners:
- del self.owners[f]
- return True
- else: return False
-
diff --git a/python_apps/media-monitor/mm2/media/monitor/pure.py b/python_apps/media-monitor/mm2/media/monitor/pure.py
deleted file mode 100644
index fc6692ba8..000000000
--- a/python_apps/media-monitor/mm2/media/monitor/pure.py
+++ /dev/null
@@ -1,508 +0,0 @@
-# -*- coding: utf-8 -*-
-import copy
-from subprocess import Popen, PIPE
-import subprocess
-import os
-import math
-import wave
-import contextlib
-import shutil, pipes
-import re
-import sys
-import stat
-import hashlib
-import locale
-import operator as op
-
-from os.path import normpath
-from itertools import takewhile
-# you need to import reduce in python 3
-try: from functools import reduce
-except: pass
-from configobj import ConfigObj
-
-from exceptions import FailedToSetLocale, FailedToCreateDir
-
-supported_extensions = [u"mp3", u"ogg", u"oga", u"flac", u"wav",
- u'm4a', u'mp4', 'opus']
-
-unicode_unknown = u'unknown'
-
-path_md = ['MDATA_KEY_TITLE', 'MDATA_KEY_CREATOR', 'MDATA_KEY_SOURCE',
- 'MDATA_KEY_TRACKNUMBER', 'MDATA_KEY_BITRATE']
-
-class LazyProperty(object):
- """
- meant to be used for lazy evaluation of an object attribute.
- property should represent non-mutable data, as it replaces itself.
- """
- def __init__(self,fget):
- self.fget = fget
- self.func_name = fget.__name__
-
- def __get__(self,obj,cls):
- if obj is None: return None
- value = self.fget(obj)
- setattr(obj,self.func_name,value)
- return value
-
-class IncludeOnly(object):
- """
- A little decorator to help listeners only be called on extensions
- they support
- NOTE: this decorator only works on methods and not functions. Maybe
- fix this?
- """
- def __init__(self, *deco_args):
- self.exts = set([])
- for arg in deco_args:
- if isinstance(arg,str): self.add(arg)
- elif hasattr(arg, '__iter__'):
- for x in arg: self.exts.add(x)
- def __call__(self, func):
- def _wrap(moi, event, *args, **kwargs):
- ext = extension(event.pathname)
- # Checking for emptiness b/c we don't want to skip direcotries
- if (ext.lower() in self.exts) or event.dir:
- return func(moi, event, *args, **kwargs)
- return _wrap
-
-def partition(f, alist):
- """
- Partition is very similar to filter except that it also returns the
- elements for which f return false but in a tuple.
- >>> partition(lambda x : x > 3, [1,2,3,4,5,6])
- ([4, 5, 6], [1, 2, 3])
- """
- return (filter(f, alist), filter(lambda x: not f(x), alist))
-
-def is_file_supported(path):
- """
- Checks if a file's path(filename) extension matches the kind that we
- support note that this is case insensitive.
- >>> is_file_supported("test.mp3")
- True
- >>> is_file_supported("/bs/path/test.mP3")
- True
- >>> is_file_supported("test.txt")
- False
- """
- return extension(path).lower() in supported_extensions
-
-# TODO : In the future we would like a better way to find out whether a show
-# has been recorded
-def is_airtime_recorded(md):
- """ Takes a metadata dictionary and returns True if it belongs to a
- file that was recorded by Airtime. """
- if not 'MDATA_KEY_CREATOR' in md: return False
- return md['MDATA_KEY_CREATOR'] == u'Airtime Show Recorder'
-
-def read_wave_duration(path):
- """ Read the length of .wav file (mutagen does not handle this) """
- with contextlib.closing(wave.open(path,'r')) as f:
- frames = f.getnframes()
- rate = f.getframerate()
- duration = frames/float(rate)
- return duration
-
-def clean_empty_dirs(path):
- """ walks path and deletes every empty directory it finds """
- # TODO : test this function
- if path.endswith('/'): clean_empty_dirs(path[0:-1])
- else:
- for root, dirs, _ in os.walk(path, topdown=False):
- full_paths = ( os.path.join(root, d) for d in dirs )
- for d in full_paths:
- if os.path.exists(d):
- #Try block avoids a race condition where a file is added AFTER listdir
- #is run but before removedirs. (Dir is not empty and removedirs throws
- #an exception in that case then.)
- try:
- if not os.listdir(d): os.rmdir(d)
- except OSError:
- pass
-
-def extension(path):
- """
- return extension of path, empty string otherwise. Prefer to return empty
- string instead of None because of bad handling of "maybe" types in python.
- I.e. interpreter won't enforce None checks on the programmer
- >>> extension("testing.php")
- 'php'
- >>> extension("a.b.c.d.php")
- 'php'
- >>> extension('/no/extension')
- ''
- >>> extension('/path/extension.ml')
- 'ml'
- """
- ext = path.split(".")
- if len(ext) < 2: return ""
- else: return ext[-1]
-
-def no_extension_basename(path):
- """
- returns the extensionsless basename of a filepath
- >>> no_extension_basename("/home/test.mp3")
- u'test'
- >>> no_extension_basename("/home/test")
- u'test'
- >>> no_extension_basename('blah.ml')
- u'blah'
- >>> no_extension_basename('a.b.c.d.mp3')
- u'a.b.c.d'
- """
- base = unicode(os.path.basename(path))
- if extension(base) == "": return base
- else: return '.'.join(base.split(".")[0:-1])
-
-def walk_supported(directory, clean_empties=False):
- """ A small generator wrapper around os.walk to only give us files
- that support the extensions we are considering. When clean_empties
- is True we recursively delete empty directories left over in
- directory after the walk. """
- if directory is None:
- return
-
- for root, dirs, files in os.walk(directory):
- full_paths = ( os.path.join(root, name) for name in files
- if is_file_supported(name) )
- for fp in full_paths: yield fp
- if clean_empties: clean_empty_dirs(directory)
-
-
-def file_locked(path):
- #Capture stderr to avoid polluting py-interpreter.log
- proc = Popen(["lsof", path], stdout=PIPE, stderr=PIPE)
- out = proc.communicate()[0].strip('\r\n')
- return bool(out)
-
-def magic_move(old, new, after_dir_make=lambda : None):
- """ Moves path old to new and constructs the necessary to
- directories for new along the way """
- new_dir = os.path.dirname(new)
- if not os.path.exists(new_dir): os.makedirs(new_dir)
- # We need this crusty hack because anytime a directory is created we must
- # re-add it with add_watch otherwise putting files in it will not trigger
- # pyinotify events
- after_dir_make()
- shutil.move(old,new)
-
-def move_to_dir(dir_path,file_path):
- """ moves a file at file_path into dir_path/basename(filename) """
- bs = os.path.basename(file_path)
- magic_move(file_path, os.path.join(dir_path, bs))
-
-def apply_rules_dict(d, rules):
- """ Consumes a dictionary of rules that maps some keys to lambdas
- which it applies to every matching element in d and returns a new
- dictionary with the rules applied. If a rule returns none then it's
- not applied """
- new_d = copy.deepcopy(d)
- for k, rule in rules.iteritems():
- if k in d:
- new_val = rule(d[k])
- if new_val is not None: new_d[k] = new_val
- return new_d
-
-def default_to_f(dictionary, keys, default, condition):
- new_d = copy.deepcopy(dictionary)
- for k in keys:
- if condition(dictionary=new_d, key=k): new_d[k] = default
- return new_d
-
-def default_to(dictionary, keys, default):
- """ Checks if the list of keys 'keys' exists in 'dictionary'. If
- not then it returns a new dictionary with all those missing keys
- defaults to 'default' """
- cnd = lambda dictionary, key: key not in dictionary
- return default_to_f(dictionary, keys, default, cnd)
-
-def remove_whitespace(dictionary):
- """ Remove values that empty whitespace in the dictionary """
- nd = copy.deepcopy(dictionary)
- bad_keys = []
- for k,v in nd.iteritems():
- if hasattr(v,'strip'):
- stripped = v.strip()
- # ghetto and maybe unnecessary
- if stripped == '' or stripped == u'': bad_keys.append(k)
- for bad_key in bad_keys: del nd[bad_key]
- return nd
-
-def parse_int(s):
- # TODO : this function isn't used anywhere yet but it may useful for emf
- """
- Tries very hard to get some sort of integer result from s. Defaults to 0
- when it fails
- >>> parse_int("123")
- '123'
- >>> parse_int("123saf")
- '123'
- >>> parse_int("asdf")
- None
- """
- if s.isdigit(): return s
- else:
- try : return str(reduce(op.add, takewhile(lambda x: x.isdigit(), s)))
- except: return None
-
-
-def organized_path(old_path, root_path, orig_md):
- """
- old_path - path where file is store at the moment <= maybe not necessary?
- root_path - the parent directory where all organized files go
- orig_md - original meta data of the file as given by mutagen AFTER being
- normalized
- return value: new file path
- """
- filepath = None
- ext = extension(old_path)
- def default_f(dictionary, key):
- if key in dictionary: return len(str(dictionary[key])) == 0
- else: return True
- # We set some metadata elements to a default "unknown" value because we use
- # these fields to create a path hence they cannot be empty Here "normal"
- # means normalized only for organized path
-
- # MDATA_KEY_BITRATE is in bytes/second i.e. (256000) we want to turn this
- # into 254kbps
-
- # Some metadata elements cannot be empty, hence we default them to some
- # value just so that we can create a correct path
- normal_md = default_to_f(orig_md, path_md, unicode_unknown, default_f)
- try:
- formatted = str(int(normal_md['MDATA_KEY_BITRATE']) / 1000)
- normal_md['MDATA_KEY_BITRATE'] = formatted + 'kbps'
- except:
- normal_md['MDATA_KEY_BITRATE'] = unicode_unknown
-
- if is_airtime_recorded(normal_md):
- # normal_md['MDATA_KEY_TITLE'] = 'show_name-yyyy-mm-dd-hh:mm:ss'
- r = "(?P.+)-(?P\d+-\d+-\d+)-(?P