instrument some bits

pull/91/head
Adam Greig 2012-07-13 00:28:24 +01:00
parent 10f93be2a3
commit 77d34ac037
1 changed file with 16 additions and 0 deletions


@@ -14,8 +14,11 @@ import traceback
 import calendar
 import optparse
 import subprocess
+import statsd
 import simplejson as json

+statsd.init_statsd({'STATSD_BUCKET_PREFIX': 'habhub.predictor'})
+
 # We use Pydap from http://pydap.org/.
 import pydap.exceptions, pydap.client, pydap.lib
 pydap.lib.CACHE = "/tmp/pydap-cache/"
@@ -65,11 +68,14 @@ def update_progress(**kwargs):
         global log
         log.error('Could not update progress file')

+@statsd.StatsdTimer.wrap('time')
 def main():
     """
     The main program routine.
     """
+    statsd.increment('run')

     # Set up our command line options
     parser = optparse.OptionParser()
     parser.add_option('-d', '--cd', dest='directory',
@@ -142,6 +148,7 @@ def main():
     # Check we got a UUID in the arguments
     if len(args) != 1:
         log.error('Exactly one positional argument should be supplied (uuid).')
+        statsd.increment('error')
         sys.exit(1)

     if options.directory:
@@ -162,6 +169,7 @@ def main():
         if process.find(uuid) > 0:
             pid = int(line.split()[0])
             if pid != os.getpid():
+                statsd.increment('duplicate')
                 log.error('A process is already running for this UUID, quitting.')
                 sys.exit(1)
@@ -180,25 +188,30 @@ def main():
                         run_time=str(int(timelib.time())))
     except IOError:
         log.error('Error opening progress.json file')
+        statsd.increment('error')
         sys.exit(1)

     # Check the predictor binary exists
     if not os.path.exists(pred_binary):
         log.error('Predictor binary does not exist.')
+        statsd.increment('error')
         sys.exit(1)

     # Check the latitude is in the right range.
     if (options.lat < -90) | (options.lat > 90):
         log.error('Latitude %s is outside of the range (-90,90).')
+        statsd.increment('error')
         sys.exit(1)

     # Check the delta sizes are valid.
     if (options.latdelta <= 0.5) | (options.londelta <= 0.5):
         log.error('Latitiude and longitude deltas must be at least 0.5 degrees.')
+        statsd.increment('error')
         sys.exit(1)

     if options.londelta > 180:
         log.error('Longitude window sizes greater than 180 degrees are meaningless.')
+        statsd.increment('error')
         sys.exit(1)

     # We need to wrap the longitude into the right range.
@@ -224,6 +237,8 @@ def main():
         dataset = dataset_for_time(time_to_find, options.hd)
     except:
         log.error('Could not locate a dataset for the requested time.')
+        statsd.increment('no_dataset')
+        statsd.increment('error')
         sys.exit(1)

     dataset_times = map(timestamp_to_datetime, dataset.time)
@@ -261,6 +276,7 @@ def main():
     subprocess.call([pred_binary, '-i/var/www/cusf-standalone-predictor/gfs/', '-v', '-o'+uuid_path+'flight_path.csv', uuid_path+'scenario.ini'] + alarm_flags)

     update_progress(pred_running=False, pred_complete=True)
+    statsd.increment('success')

 def purge_cache():
     """