Merge pull request #53 from projecthorus/model_download

Added experimental model_download URL
master
Mark Jessop 2024-09-12 21:39:24 +09:30 committed by GitHub
commit cfd4ec10ca
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 24 additions and 5 deletions

View file

@@ -8,4 +8,4 @@
 # Now using Semantic Versioning (https://semver.org/) MAJOR.MINOR.PATCH
-__version__ = "1.5.3"
+__version__ = "1.5.4"

View file

@@ -149,6 +149,7 @@ ascent_rate_averaging = 10
 # GFS dataset already present and available.
 # If you will be using the 'Download Model' button, then leave this at False, and Offline predictions
 # will be enabled once a valid model is available.
+# Downloading of a new model can also be triggered by running: curl http://localhost:5001/download_model
 offline_predictions = False

 # Predictory Binary Location
@@ -160,9 +161,9 @@ gfs_directory = ./gfs/
 # Wind Model Download Command
 # Optional command to enable downloading of wind data via a web client button.
-# Example: (this will require copying the get_wind_data.py script to this dirctory)
+# Example:
 # model_download = python3 -m cusfpredict.gfs --lat=-33 --lon=139 --latdelta=10 --londelta=10 -f 24 -m 0p50 -o gfs
-# The gfs directory (above) will be cleared of all .dat files prior to the above command being run.
+# The gfs directory (above) will be cleared of all data files once the new model is downloaded.
 model_download = none
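The curl trigger mentioned in the new config comment can also be driven from a script rather than typed by hand. A minimal sketch using Python's requests library, assuming chasemapper is reachable at localhost:5001 (as in the curl example above) and that requests is installed:

# Minimal sketch: trigger a wind model download via the new /download_model endpoint.
# Assumes chasemapper is running locally on port 5001, as in the curl example above.
# The endpoint replies with a short status string ("No model download cmd." if the
# model_download option is left at none).
import requests

try:
    response = requests.get("http://localhost:5001/download_model", timeout=10)
    print(response.text)
except requests.RequestException as err:
    print("Could not reach chasemapper:", err)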

View file

@@ -486,7 +486,8 @@ def run_prediction():
             current_payloads[_payload]["pred_path"] = []
             current_payloads[_payload]["pred_landing"] = []
             current_payloads[_payload]["burst"] = []
-            logging.error("Prediction Failed.")
+            logging.error("Prediction Failed, possible invalid or missing dataset.")
+            flask_emit_event("predictor_model_update", {"model": "Dataset invalid."})

         # Abort predictions
         if (
@@ -550,9 +551,10 @@ def run_prediction():
                     "Abort Prediction Updated, %d data points." % len(_pred_path)
                 )
             else:
-                logging.error("Prediction Failed.")
                 current_payloads[_payload]["abort_path"] = []
                 current_payloads[_payload]["abort_landing"] = []
+                logging.error("Prediction Failed, possible invalid or missing dataset.")
+                flask_emit_event("predictor_model_update", {"model": "Dataset invalid."})
         else:
             # Zero the abort path and landing
             current_payloads[_payload]["abort_path"] = []
@@ -678,6 +680,22 @@ def download_new_model(data):
     flask_emit_event("predictor_model_update", {"model": _status})


+@app.route("/download_model")
+def download_new_model_2():
+    """ Trigger a download of a new weather model via a GET request """
+    global pred_settings, model_download_running
+
+    logging.info("Web Client Initiated request for new predictor data via /download_model.")
+
+    if pred_settings["pred_model_download"] == "none":
+        logging.info("No GFS model download command specified.")
+        return "No model download cmd."
+    else:
+        _model_cmd = pred_settings["pred_model_download"]
+        _status = predictor_spawn_download(_model_cmd, model_download_finished)
+        return _status
+
+
 # Data Clearing Functions
 @socketio.on("payload_data_clear", namespace="/chasemapper")
 def clear_payload_data(data):
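The web map's 'Download Model' button already triggers this download; exposing the same action as a plain GET route additionally lets it be scripted, for example to keep the offline predictor's GFS dataset fresh on a schedule. A rough sketch of such a periodic trigger (the interval, URL, and simple loop are illustrative assumptions, not part of this PR):

# Illustrative sketch only: periodically hit /download_model so the offline
# predictor always has a recent GFS dataset. The 6-hour interval and URL are
# assumptions; the actual download is performed by whatever model_download
# command is configured in chasemapper.
import time
import requests

CHASEMAPPER_URL = "http://localhost:5001/download_model"
REFRESH_INTERVAL = 6 * 3600  # seconds; new GFS runs are published roughly every 6 hours

while True:
    try:
        status = requests.get(CHASEMAPPER_URL, timeout=10).text
        print("chasemapper replied:", status)
    except requests.RequestException as err:
        print("Could not reach chasemapper:", err)
    time.sleep(REFRESH_INTERVAL)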