Merge pull request #90 from danielrichman/master

Multiple changes: raise the prediction cleanup age from 7 to 14 days, add a --redirect option so forked predictor runs log to a per-prediction file, report non-zero exits and unhandled exceptions through progress.json, cast and validate the POSTed scenario fields in PHP, and update the launch site list.
Adam Greig 2012-07-12 16:23:35 -07:00
commit 10f93be2a3
6 changed files with 69 additions and 36 deletions

View file

@@ -1,7 +1,7 @@
 #!/bin/bash
 PARAM="mtime"
-AGE="7"
+AGE="14"
 REPOROOT="/var/www/hab/predict/"
 DATADIR="predict/preds"

View file

@@ -10,6 +10,7 @@ import math
 import sys
 import os
 import logging
+import traceback
 import calendar
 import optparse
 import subprocess
@@ -32,8 +33,9 @@ pydap.util.http.httplib2._entry_disposition = fresh
 # Output logger format
 log = logging.getLogger('main')
+log_formatter = logging.Formatter('%(levelname)s: %(message)s')
 console = logging.StreamHandler()
-console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+console.setFormatter(log_formatter)
 log.addHandler(console)

 progress_f = ''
@@ -45,7 +47,7 @@ progress = {
    'gfs_timestamp': '',
    'pred_running': False,
    'pred_complete': False,
-    'progress_error': '',
+    'error': '',
 }

 def update_progress(**kwargs):
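(The body of update_progress() is not part of this diff. As a rough sketch of what it presumably does, assuming json is imported at module level: it merges its keyword arguments, such as the renamed error key above, into the progress dict and rewrites the progress.json file at progress_f that the PHP/JS frontend polls.)

def update_progress(**kwargs):
    # Sketch only: the real body is outside this diff.
    progress.update(kwargs)
    try:
        with open(progress_f, 'w') as f:
            json.dump(progress, f)  # assumes `import json` at module level
    except IOError:
        log.exception('Could not write progress file')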
@@ -77,6 +79,8 @@ def main():
        help='detach the process and run in the background')
    parser.add_option('--alarm', dest='alarm', action="store_true",
        help='setup an alarm for 10 minutes time to prevent hung processes')
+    parser.add_option('--redirect', dest='redirect', default='/dev/null',
+        help='if forking, file to send stdout/stderr to', metavar='FILE')
    parser.add_option('-t', '--timestamp', dest='timestamp',
        help='search for dataset covering the POSIX timestamp TIME \t[default: now]',
        metavar='TIME', type='int',
@@ -144,7 +148,7 @@ def main():
    os.chdir(options.directory)

    if options.fork:
-        detach_process()
+        detach_process(options.redirect)

    if options.alarm:
        setup_alarm()
@@ -219,7 +223,7 @@ def main():
    try:
        dataset = dataset_for_time(time_to_find, options.hd)
    except:
-        print('Could not locate a dataset for the requested time.')
+        log.error('Could not locate a dataset for the requested time.')
        sys.exit(1)

    dataset_times = map(timestamp_to_datetime, dataset.time)
@@ -254,7 +258,7 @@ def main():
    else:
        alarm_flags = []

-    subprocess.call([pred_binary, '-i/var/www/hab/predict/gfs/', '-v', '-o'+uuid_path+'flight_path.csv', uuid_path+'scenario.ini'] + alarm_flags)
+    subprocess.call([pred_binary, '-i/var/www/cusf-standalone-predictor/gfs/', '-v', '-o'+uuid_path+'flight_path.csv', uuid_path+'scenario.ini'] + alarm_flags)
    update_progress(pred_running=False, pred_complete=True)
@@ -350,9 +354,7 @@ def write_file(output_format, data, window, mintime, maxtime):
        downloaded_data[var] = selection
        log.info(' Downloaded data has shape %s...', selection.shape)

-        if len(selection.shape) != 3:
-            log.error(' Expected 3-d data.')
-            return
+        assert len(selection.shape) == 3

    now = datetime.datetime.now()
    time_elapsed = now - starttime
@@ -539,7 +541,7 @@ def dataset_for_time(time, hd):
    raise RuntimeError('Could not find appropriate dataset.')

-def detach_process():
+def detach_process(redirect):
    # Fork
    if os.fork() > 0:
        os._exit(0)
@@ -547,9 +549,12 @@ def detach_process():
    # Detach
    os.setsid()
-    null_fd = os.open(os.devnull, os.O_RDWR)
-    for s in [sys.stdin, sys.stdout, sys.stderr]:
-        os.dup2(null_fd, s.fileno())
+    null_fd = os.open(os.devnull, os.O_RDONLY)
+    out_fd = os.open(redirect, os.O_WRONLY | os.O_APPEND)
+    os.dup2(null_fd, sys.stdin.fileno())
+    for s in [sys.stdout, sys.stderr]:
+        os.dup2(out_fd, s.fileno())

    # Fork
    if os.fork() > 0:
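Taken together, detach_process() is the usual double-fork daemonisation; the change keeps stdin on /dev/null but points stdout and stderr at the file named by --redirect (opened append-only), so output from a forked prediction run is captured instead of discarded. Assembled from the hunks above, with the second fork's body (not shown in this diff) assumed to mirror the first:

def detach_process(redirect):
    # First fork: exit the parent so the child can become a session leader.
    if os.fork() > 0:
        os._exit(0)

    # Detach from the controlling terminal, keep stdin on /dev/null, and send
    # stdout/stderr to the per-prediction log file passed via --redirect.
    os.setsid()
    null_fd = os.open(os.devnull, os.O_RDONLY)
    out_fd = os.open(redirect, os.O_WRONLY | os.O_APPEND)
    os.dup2(null_fd, sys.stdin.fileno())
    for s in [sys.stdout, sys.stderr]:
        os.dup2(out_fd, s.fileno())

    # Second fork: body not shown in this diff, assumed to exit the parent again.
    if os.fork() > 0:
        os._exit(0)

Note that O_APPEND without O_CREAT requires the redirect target to exist already: the PHP change below pre-creates the per-prediction log by writing the launched command into it, and the option's default of /dev/null always exists.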
@@ -562,5 +567,18 @@ def setup_alarm():

 # If this is being run from the interpreter, run the main function.
 if __name__ == '__main__':
-    main()
+    try:
+        main()
+    except SystemExit as e:
+        log.debug("Exit: " + repr(e))
+        if e.code != 0 and progress_f:
+            update_progress(error="Unknown error exit")
+        raise
+    except Exception as e:
+        log.exception("Uncaught exception")
+        (exc_type, exc_value, discard_tb) = sys.exc_info()
+        exc_tb = traceback.format_exception_only(exc_type, exc_value)
+        info = exc_tb[-1].strip()
+        if progress_f:
+            update_progress(error="Unhandled exception: " + info)
+        raise
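For reference, traceback.format_exception_only() returns the trailing "Type: message" line(s) of a traceback, so the error value written into progress.json (and shown by the JS error handler further down) is a one-line summary. A small illustration, not part of the commit:

import sys
import traceback

try:
    raise RuntimeError('Could not find appropriate dataset.')
except Exception:
    exc_type, exc_value, _tb = sys.exc_info()
    info = traceback.format_exception_only(exc_type, exc_value)[-1].strip()
    # info == 'RuntimeError: Could not find appropriate dataset.'
    # update_progress(error="Unhandled exception: " + info) would expose
    # exactly this string to the web UI via progress.json.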

View file

@@ -8,12 +8,12 @@ define("ADMIN_EMAIL", "jon@hexoc.com");
 define("LOCATION_SAVE_ENABLE", true);

-define("DEBUG", true);
-define("AT_LOG", "/tmp/pred_log");

 // Path to the root of the git repo inc. trailing /
 define("ROOT", "/var/www/hab/predict/");

+// Path to python virtualenv to use
+// define("PYTHON", ROOT . "ENV/bin/python");

 // Path to prediction data dir from predict/
 define("PREDS_PATH", "preds/");
@@ -21,5 +21,6 @@ define("PREDS_PATH", "preds/");
 define("SCENARIO_FILE", "scenario.ini");
 define("FLIGHT_CSV", "flight_path.csv");
 define("PROGRESS_JSON", "progress.json");
+define("LOG_FILE", "py_log");
 ?>

View file

@@ -23,15 +23,15 @@ function createModel($post_array) {
    $pred_model['day'] = (int)$post_array['day'];
    $pred_model['year'] = (int)$post_array['year'];

-    $pred_model['lat'] = $post_array['lat'];
-    $pred_model['lon'] = $post_array['lon'];
+    $pred_model['lat'] = (float)$post_array['lat'];
+    $pred_model['lon'] = (float)$post_array['lon'];
    $pred_model['asc'] = (float)$post_array['ascent'];
-    $pred_model['alt'] = $post_array['initial_alt'];
+    $pred_model['alt'] = (int)$post_array['initial_alt'];
    $pred_model['des'] = (float)$post_array['drag'];
-    $pred_model['burst'] = $post_array['burst'];
-    $pred_model['delta_lat'] = $post_array['delta_lat'];
-    $pred_model['delta_lon'] = $post_array['delta_lon'];
+    $pred_model['burst'] = (int)$post_array['burst'];
+    $pred_model['delta_lat'] = (int)$post_array['delta_lat'];
+    $pred_model['delta_lon'] = (int)$post_array['delta_lon'];
    $pred_model['wind_error'] = 0;
@@ -39,8 +39,8 @@ function createModel($post_array) {
    // Make a timestamp of the form data
    $pred_model['timestamp'] = mktime($pred_model['hour'], $pred_model['min'],
-        $pred_model['sec'], (int)$pred_model['month'], $pred_model['day'],
-        (int)$pred_model['year'] - 2000);
+        $pred_model['sec'], $pred_model['month'], $pred_model['day'],
+        $pred_model['year'] - 2000);

    // If all was good, return the prediction model
@@ -75,7 +75,14 @@ function verifyModel( $pred_model, $software_available ) {
                $return_array['msg'] = "The model asked for software that
                    does not exist on this server";
            }
-        } else if ( $idx == "delta_lat" || $idx == "delta_lon" ) {
+        }
+        else if ( !is_numeric( $value ) ) {
+            $return_array['valid'] = false;
+            $return_array['msg'] = "A value that should have been numeric
+                did not validate as such";
+        }
+
+        if ( $idx == "delta_lat" || $idx == "delta_lon" ) {
            if ( $value < 1 || $value > 10 ) {
                $return_array['valid'] = false;
                $return_array['msg'] = "The latitude or longitude deltas
@@ -87,10 +94,6 @@ function verifyModel( $pred_model, $software_available ) {
                $return_array['msg'] = "The ascent and descent rates cannot
                    be zero or negative";
            }
-        } else if ( !is_numeric( $value ) ) {
-            $return_array['valid'] = false;
-            $return_array['msg'] = "A value that should have been numeric
-                did not validate as such";
        }
    }
@@ -131,12 +134,16 @@ function runPred($pred_model) {
    $predictor_lat = number_format($pred_model['lat'], 0);
    $predictor_lon = number_format($pred_model['lon'], 0);

-    $sh = ROOT . "/predict.py --cd=" . ROOT . " --fork --alarm -v --latdelta="
+    $log = PREDS_PATH . $pred_model['uuid'] . "/" . LOG_FILE;
+    $sh = ROOT . "/predict.py --cd=" . ROOT . " --fork --alarm --redirect=predict/$log -v --latdelta="
        .$pred_model['delta_lat']." --londelta=".$pred_model['delta_lon']
        ." -p1 -f5 -t ".$pred_model['timestamp']
        ." --lat=".$predictor_lat." --lon=".$predictor_lon." " . $use_hd
        . $pred_model['uuid'];

-    if (DEBUG) shell_exec("echo " . $sh . " > " . AT_LOG);
+    if (defined("PYTHON"))
+        $sh = PYTHON . " " . $sh;
+
+    file_put_contents($log, "Command: " . $sh . "\n");
    shell_exec($sh);
 }

View file

@@ -253,6 +253,8 @@ function processProgress(progress) {
        clearInterval(ajaxEventHandle);
        appendDebug("There was an error in running the prediction: "
            + progress['error']);
+        resetGUI();
+        toggleWindow("scenario_template", "showHideDebug", "Show Debug", "Hide Debug", "show");
    } else {
        // get the progress of the wind data
        if ( progress['gfs_complete'] == true ) {

View file

@@ -14,10 +14,10 @@
        "latitude" : -34.9499,
        "longitude" : 138.5194
    },
-    "Elvington": {
+    "Brightwalton": {
        "altitude" : 0,
-        "latitude" : 53.9231,
-        "longitude" : -0.9935
+        "latitude" : 51.51143,
+        "longitude" : -1.38870
    },
    "Boston Spa": {
        "latitude" : 53.8997,
@@ -33,5 +33,10 @@
        "altitude" : 0,
        "latitude" : 54.654118,
        "longitude" : -7.034914
+    },
+    "Preston St Mary": {
+        "latitude" : 52.1215,
+        "longitude" : 0.8078,
+        "altitude" : 70
    }
 }