Mirror of https://github.com/projecthorus/chasemapper
Added Tawhiri predictions. Added geo link for landing position
parent fb39078412
commit e6c314109a
|
@ -25,7 +25,8 @@ default_config = {
|
|||
'thunderforest_api_key': 'none',
|
||||
|
||||
# Predictor settings
|
||||
'pred_enabled': False, # Enable running and display of predicted flight paths.
|
||||
'pred_enabled': True, # Enable running and display of predicted flight paths.
|
||||
'offline_predictions': False, # Use an offline GFS model and predictor instead of Tawhiri.
|
||||
# Default prediction settings (actual values will be used once the flight is underway)
|
||||
'pred_model': "Disabled",
|
||||
'pred_desc_rate': 6.0,
|
||||
|
@ -87,6 +88,7 @@ def parse_config_file(filename):
|
|||
|
||||
# Predictor
|
||||
chase_config['pred_enabled'] = config.getboolean('predictor', 'predictor_enabled')
|
||||
chase_config['offline_predictions'] = config.getboolean('predictor', 'offline_predictions')
|
||||
chase_config['pred_burst'] = config.getfloat('predictor', 'default_burst')
|
||||
chase_config['pred_desc_rate'] = config.getfloat('predictor', 'default_descent_rate')
|
||||
chase_config['pred_binary'] = config.get('predictor','pred_binary')
|
||||
|
|
|
@ -0,0 +1,134 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Project Horus - Browser-Based Chase Mapper - Tawhiri Interface
|
||||
#
|
||||
# Grab predictions from the Tawhiri Predictions API
|
||||
# Refer here for documentation on Tawhiri: https://tawhiri.readthedocs.io/en/latest/api.html
|
||||
#
|
||||
# Copyright (C) 2020 Mark Jessop <vk5qi@rfhead.net>
|
||||
# Released under GNU GPL v3 or later
|
||||
#
|
||||
import datetime
|
||||
import logging
|
||||
import pytz
|
||||
import requests
|
||||
import subprocess
|
||||
from dateutil.parser import parse
|
||||
from threading import Thread
|
||||
|
||||
TAWHIRI_API_URL = "http://predict.cusf.co.uk/api/v1/"
|
||||
|
||||
def get_tawhiri_prediction(
    launch_datetime,
    launch_latitude,
    launch_longitude,
    launch_altitude=0,
    ascent_rate=5.0,
    burst_altitude=30000.0,
    descent_rate=5.0,
    profile='standard_profile',
    dataset=None,
    timeout=10
):
    """ Request a Prediction from the Tawhiri Predictor API.

    Refer here for API documentation: https://tawhiri.readthedocs.io/en/latest/api.html

    Args:
        launch_datetime (datetime.datetime): Prediction start time. A naive
            datetime is assumed to already be in UTC.
        launch_latitude (float): Launch latitude, decimal degrees.
        launch_longitude (float): Launch longitude, decimal degrees.
        launch_altitude (float): Launch altitude, metres ASL.
        ascent_rate (float): Ascent rate, m/s.
        burst_altitude (float): Burst altitude, metres ASL.
        descent_rate (float): Descent rate, m/s (positive).
        profile (str): Tawhiri flight profile name.
        dataset (str): Optional specific dataset to use; omitted if None/empty.
        timeout (int): HTTP request timeout, seconds.

    Returns:
        dict: Output of parse_tawhiri_data ({'dataset': ..., 'path': ...}),
        or None if the request failed or the API returned an error.
    """

    # Localise supplied time to UTC if not already done.
    # (Stdlib timezone.utc is equivalent to pytz.utc.localize for UTC.)
    if launch_datetime.tzinfo is None:
        launch_datetime = launch_datetime.replace(tzinfo=datetime.timezone.utc)

    # Create RFC3339-compliant timestamp
    _dt_rfc3339 = launch_datetime.isoformat()

    _params = {
        "launch_latitude": launch_latitude,
        "launch_longitude": launch_longitude,
        "launch_altitude": launch_altitude,
        "launch_datetime": _dt_rfc3339,
        "ascent_rate": ascent_rate,
        "descent_rate": descent_rate,
        "burst_altitude": burst_altitude,
        "profile": profile
    }

    if dataset:
        _params["dataset"] = dataset

    logging.debug("Tawhiri - Requesting prediction using parameters: %s" % str(_params))

    try:
        _r = requests.get(TAWHIRI_API_URL, params=_params, timeout=timeout)

        _json = _r.json()

        if 'error' in _json:
            # The Tawhiri API has returned an error
            _error = "%s: %s" % (_json['error']['type'], _json['error']['description'])

            logging.error("Tawhiri - %s" % _error)

            return None

        else:
            return parse_tawhiri_data(_json)

    except Exception as e:
        # Covers network failures, timeouts, and non-JSON responses.
        logging.error("Tawhiri - Error running prediction: %s" % str(e))

        return None
|
||||
|
||||
|
||||
def _parse_tawhiri_timestamp(timestamp):
    """ Parse a single RFC3339 timestamp (as returned by Tawhiri) into an
    aware datetime, using only the standard library.

    Tawhiri emits timestamps of the form 2021-06-01T00:00:00Z; a trailing 'Z'
    is rewritten to '+00:00' since datetime.fromisoformat only accepts 'Z'
    from Python 3.11 onwards.
    """
    return datetime.datetime.fromisoformat(timestamp.replace("Z", "+00:00"))


def parse_tawhiri_data(data):
    """ Parse a returned flight trajectory from Tawhiri, and convert it to a
    cusf_predictor_wrapper compatible format.

    Args:
        data (dict): Decoded JSON response from the Tawhiri API, containing
            'request' (with the 'dataset' timestamp) and 'prediction'
            (a list of flight stages, each with a 'trajectory' point list).

    Returns:
        dict: {'dataset': 'YYYYMMDDHHz' dataset identifier,
               'path': list of [unix_time, latitude, longitude, altitude]}
    """

    # Extract dataset information and reformat to the
    # cusf_predictor_wrapper-style model name (e.g. '2021060100z').
    _dataset = _parse_tawhiri_timestamp(data['request']['dataset'])
    _dataset = _dataset.strftime("%Y%m%d%Hz")

    _path = []

    # Flatten all flight stages (ascent/descent) into a single point list.
    for _stage in data['prediction']:
        for _point in _stage['trajectory']:
            _dt = _parse_tawhiri_timestamp(_point['datetime']).timestamp()
            _path.append([_dt, _point['latitude'], _point['longitude'], _point['altitude']])

    _output = {
        "dataset": _dataset,
        "path": _path
    }

    return _output
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Basic self-test: run two predictions against the live Tawhiri API
    # and dump the parsed results to the console.
    import datetime
    import pprint

    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.INFO)

    _start_time = datetime.datetime.utcnow()

    # Regular complete-flightpath prediction
    pprint.pprint(
        get_tawhiri_prediction(
            launch_datetime=_start_time,
            launch_latitude=-34.9499,
            launch_longitude=138.5194,
            launch_altitude=0,
        )
    )

    # Descent prediction - burst altitude set just above the current altitude.
    pprint.pprint(
        get_tawhiri_prediction(
            launch_datetime=_start_time,
            launch_latitude=-34.9499,
            launch_longitude=138.5194,
            launch_altitude=10000,
            burst_altitude=10001,
            descent_rate=6.0,
        )
    )
|
|
@ -82,17 +82,28 @@ thunderforest_api_key = none
|
|||
|
||||
|
||||
# Predictor Settings
|
||||
# Use of the predictor requires installing the CUSF Predictor Python Wrapper from here:
|
||||
# https://github.com/darksidelemm/cusf_predictor_wrapper
|
||||
# You also need to compile the predictor binary, and copy it into this directory.
|
||||
# By default this will attempt to get predictions from the online Tawhiri Predictions API.
|
||||
# Optionally, you can enable offline predictions below.
|
||||
[predictor]
|
||||
# Enable Predictor (True/False) - This can also be enabled from the web client.
|
||||
predictor_enabled = False
|
||||
predictor_enabled = True
|
||||
|
||||
# Predictor defaults - these can be modified at runtime in the web interface.
|
||||
default_burst = 30000
|
||||
default_descent_rate = 5.0
|
||||
|
||||
|
||||
# Offline Predictions
|
||||
# Use of the offline predictor requires installing the CUSF Predictor Python Wrapper from here:
|
||||
# https://github.com/darksidelemm/cusf_predictor_wrapper
|
||||
# You also need to compile the predictor binary, and copy it into this directory.
|
||||
#
|
||||
# Note: This setting turns offline predictions *on* by default, which assumes there is a valid
|
||||
# GFS dataset already present and available.
|
||||
# If you will be using the 'Download Model' button, then leave this at False, and Offline predictions
|
||||
# will be enabled once a valid model is available.
|
||||
offline_predictions = False
|
||||
|
||||
# Predictor Binary Location
|
||||
# Where to find the built CUSF predictor binary. This will usually be ./pred or pred.exe (on Windows)
|
||||
pred_binary = ./pred
|
||||
|
|
195
horusmapper.py
195
horusmapper.py
|
@ -29,6 +29,7 @@ from chasemapper.predictor import predictor_spawn_download, model_download_runni
|
|||
from chasemapper.habitat import HabitatChaseUploader, initListenerCallsign, uploadListenerPosition
|
||||
from chasemapper.logger import ChaseLogger
|
||||
from chasemapper.bearings import Bearings
|
||||
from chasemapper.tawhiri import get_tawhiri_prediction
|
||||
|
||||
|
||||
# Define Flask Application, and allow automatic reloading of templates for dev work
|
||||
|
@ -307,7 +308,10 @@ def run_prediction():
|
|||
''' Run a Flight Path prediction '''
|
||||
global chasemapper_config, current_payloads, current_payload_tracks, predictor, predictor_semaphore
|
||||
|
||||
if (predictor == None) or (chasemapper_config['pred_enabled'] == False):
|
||||
if (chasemapper_config['pred_enabled'] == False):
|
||||
return
|
||||
|
||||
if (chasemapper_config['offline_predictions'] == True) and (predictor == None):
|
||||
return
|
||||
|
||||
# Set the semaphore so we don't accidentally kill the predictor object while it's running.
|
||||
|
@ -338,8 +342,39 @@ def run_prediction():
|
|||
else:
|
||||
_burst_alt = chasemapper_config['pred_burst']
|
||||
|
||||
logging.info("Running Predictor for: %s." % _payload)
|
||||
_pred_path = predictor.predict(
|
||||
|
||||
if predictor == "Tawhiri":
|
||||
logging.info("Requesting Prediction from Tawhiri for %s." % _payload)
|
||||
# Tawhiri requires that the burst altitude always be higher than the starting altitude.
|
||||
if _current_pos['is_descending']:
|
||||
_burst_alt = _current_pos['alt'] + 1
|
||||
|
||||
# Tawhiri requires that the ascent rate be > 0 for standard profiles.
|
||||
if _current_pos['ascent_rate'] < 0.1:
|
||||
_current_pos['ascent_rate'] = 0.1
|
||||
|
||||
_tawhiri = get_tawhiri_prediction(
|
||||
launch_datetime=_current_pos['time'],
|
||||
launch_latitude=_current_pos['lat'],
|
||||
launch_longitude=_current_pos['lon'],
|
||||
launch_altitude=_current_pos['alt'],
|
||||
burst_altitude=_burst_alt,
|
||||
ascent_rate=_current_pos['ascent_rate'],
|
||||
descent_rate=_desc_rate,
|
||||
)
|
||||
|
||||
if _tawhiri:
|
||||
_pred_path = _tawhiri['path']
|
||||
_dataset = _tawhiri['dataset'] + " (Online)"
|
||||
# Inform the client of the dataset age
|
||||
flask_emit_event('predictor_model_update',{'model':_dataset})
|
||||
|
||||
else:
|
||||
_pred_path = []
|
||||
|
||||
else:
|
||||
logging.info("Running Offline Predictor for %s." % _payload)
|
||||
_pred_path = predictor.predict(
|
||||
launch_lat=_current_pos['lat'],
|
||||
launch_lon=_current_pos['lon'],
|
||||
launch_alt=_current_pos['alt'],
|
||||
|
@ -383,17 +418,42 @@ def run_prediction():
|
|||
|
||||
# Abort predictions
|
||||
if chasemapper_config['show_abort'] and (_current_pos['alt'] < chasemapper_config['pred_burst']) and (_current_pos['is_descending'] == False):
|
||||
logging.info("Running Abort Predictor for: %s." % _payload)
|
||||
|
||||
_abort_pred_path = predictor.predict(
|
||||
launch_lat=_current_pos['lat'],
|
||||
launch_lon=_current_pos['lon'],
|
||||
launch_alt=_current_pos['alt'],
|
||||
if predictor == "Tawhiri":
|
||||
logging.info("Requesting Abort Prediction from Tawhiri for %s." % _payload)
|
||||
|
||||
# Tawhiri requires that the ascent rate be > 0 for standard profiles.
|
||||
if _current_pos['ascent_rate'] < 0.1:
|
||||
_current_pos['ascent_rate'] = 0.1
|
||||
|
||||
_tawhiri = get_tawhiri_prediction(
|
||||
launch_datetime=_current_pos['time'],
|
||||
launch_latitude=_current_pos['lat'],
|
||||
launch_longitude=_current_pos['lon'],
|
||||
launch_altitude=_current_pos['alt'],
|
||||
burst_altitude=_burst_alt + 200,
|
||||
ascent_rate=_current_pos['ascent_rate'],
|
||||
descent_rate=_desc_rate,
|
||||
burst_alt=_current_pos['alt']+200,
|
||||
launch_time=_current_pos['time'],
|
||||
descent_mode=_current_pos['is_descending'])
|
||||
)
|
||||
|
||||
if _tawhiri:
|
||||
_abort_pred_path = _tawhiri['path']
|
||||
|
||||
else:
|
||||
_abort_pred_path = []
|
||||
|
||||
else:
|
||||
logging.info("Running Offline Abort Predictor for: %s." % _payload)
|
||||
|
||||
_abort_pred_path = predictor.predict(
|
||||
launch_lat=_current_pos['lat'],
|
||||
launch_lon=_current_pos['lon'],
|
||||
launch_alt=_current_pos['alt'],
|
||||
ascent_rate=_current_pos['ascent_rate'],
|
||||
descent_rate=_desc_rate,
|
||||
burst_alt=_current_pos['alt']+200,
|
||||
launch_time=_current_pos['time'],
|
||||
descent_mode=_current_pos['is_descending'])
|
||||
|
||||
if len(_pred_path) > 1:
|
||||
# Valid Prediction!
|
||||
|
@ -439,56 +499,73 @@ def run_prediction():
|
|||
|
||||
def initPredictor():
|
||||
global predictor, predictor_thread, chasemapper_config, pred_settings
|
||||
try:
|
||||
from cusfpredict.predict import Predictor
|
||||
from cusfpredict.utils import gfs_model_age, available_gfs
|
||||
|
||||
# Check if we have any GFS data
|
||||
_model_age = gfs_model_age(pred_settings['gfs_path'])
|
||||
if _model_age == "Unknown":
|
||||
logging.error("No GFS data in directory.")
|
||||
chasemapper_config['pred_model'] = "No GFS Data."
|
||||
flask_emit_event('predictor_model_update',{'model':"No GFS data."})
|
||||
chasemapper_config['pred_enabled'] = False
|
||||
else:
|
||||
# Check model contains data to at least 4 hours into the future.
|
||||
(_model_start, _model_end) = available_gfs(pred_settings['gfs_path'])
|
||||
_model_now = datetime.utcnow() + timedelta(0,60*60*4)
|
||||
if (_model_now < _model_start) or (_model_now > _model_end):
|
||||
# No suitable GFS data!
|
||||
logging.error("GFS Data in directory does not cover now!")
|
||||
chasemapper_config['pred_model'] = "Old GFS Data."
|
||||
flask_emit_event('predictor_model_update',{'model':"Old GFS data."})
|
||||
chasemapper_config['pred_enabled'] = False
|
||||
|
||||
if chasemapper_config['offline_predictions']:
|
||||
# Attempt to initialize an Offline Predictor instance
|
||||
try:
|
||||
from cusfpredict.predict import Predictor
|
||||
from cusfpredict.utils import gfs_model_age, available_gfs
|
||||
|
||||
# Check if we have any GFS data
|
||||
_model_age = gfs_model_age(pred_settings['gfs_path'])
|
||||
if _model_age == "Unknown":
|
||||
logging.error("No GFS data in directory.")
|
||||
chasemapper_config['pred_model'] = "No GFS Data."
|
||||
flask_emit_event('predictor_model_update',{'model':"No GFS data."})
|
||||
chasemapper_config['offline_predictions'] = False
|
||||
else:
|
||||
chasemapper_config['pred_model'] = _model_age
|
||||
flask_emit_event('predictor_model_update',{'model':_model_age})
|
||||
predictor = Predictor(bin_path=pred_settings['pred_binary'], gfs_path=pred_settings['gfs_path'])
|
||||
# Check model contains data to at least 4 hours into the future.
|
||||
(_model_start, _model_end) = available_gfs(pred_settings['gfs_path'])
|
||||
_model_now = datetime.utcnow() + timedelta(0,60*60*4)
|
||||
if (_model_now < _model_start) or (_model_now > _model_end):
|
||||
# No suitable GFS data!
|
||||
logging.error("GFS Data in directory does not cover now!")
|
||||
chasemapper_config['pred_model'] = "Old GFS Data."
|
||||
flask_emit_event('predictor_model_update',{'model':"Old GFS data."})
|
||||
chasemapper_config['offline_predictions'] = False
|
||||
|
||||
# Start up the predictor thread if it is not running.
|
||||
if predictor_thread == None:
|
||||
predictor_thread = Thread(target=predictorThread)
|
||||
predictor_thread.start()
|
||||
else:
|
||||
chasemapper_config['pred_model'] = _model_age + " (Offline)"
|
||||
flask_emit_event('predictor_model_update',{'model':_model_age + " (Offline)"})
|
||||
predictor = Predictor(bin_path=pred_settings['pred_binary'], gfs_path=pred_settings['gfs_path'])
|
||||
|
||||
# Set the predictor to enabled, and update the clients.
|
||||
chasemapper_config['pred_enabled'] = True
|
||||
# Start up the predictor thread if it is not running.
|
||||
if predictor_thread == None:
|
||||
predictor_thread = Thread(target=predictorThread)
|
||||
predictor_thread.start()
|
||||
|
||||
# Set the predictor to enabled, and update the clients.
|
||||
chasemapper_config['offline_predictions'] = True
|
||||
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
logging.error("Loading predictor failed: " + str(e))
|
||||
flask_emit_event('predictor_model_update',{'model':"Failed - Check Log."})
|
||||
chasemapper_config['pred_model'] = "Failed - Check Log."
|
||||
print("Loading Predictor failed.")
|
||||
predictor = None
|
||||
|
||||
flask_emit_event('server_settings_update', chasemapper_config)
|
||||
else:
|
||||
# No initialization required for the online predictor
|
||||
predictor = "Tawhiri"
|
||||
flask_emit_event('predictor_model_update',{'model':"Tawhiri"})
|
||||
|
||||
# Start up the predictor thread if it is not running.
|
||||
if predictor_thread == None:
|
||||
predictor_thread = Thread(target=predictorThread)
|
||||
predictor_thread.start()
|
||||
|
||||
|
||||
flask_emit_event('server_settings_update', chasemapper_config)
|
||||
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
logging.error("Loading predictor failed: " + str(e))
|
||||
flask_emit_event('predictor_model_update',{'model':"Failed - Check Log."})
|
||||
chasemapper_config['pred_model'] = "Failed - Check Log."
|
||||
print("Loading Predictor failed.")
|
||||
predictor = None
|
||||
|
||||
|
||||
def model_download_finished(result):
|
||||
""" Callback for when the model download is finished """
|
||||
global chasemapper_config
|
||||
if result == "OK":
|
||||
# Downloader reported OK, restart the predictor.
|
||||
chasemapper_config["offline_predictions"] = True
|
||||
initPredictor()
|
||||
else:
|
||||
# Downloader reported an error, pass on to the client.
|
||||
|
@ -834,15 +911,17 @@ class WebHandler(logging.Handler):
|
|||
|
||||
def emit(self, record):
|
||||
""" Emit a log message via SocketIO """
|
||||
if 'socket.io' not in record.msg:
|
||||
# Convert log record into a dictionary
|
||||
log_data = {
|
||||
'level': record.levelname,
|
||||
'timestamp': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
|
||||
'msg': record.msg
|
||||
}
|
||||
# Emit to all socket.io clients
|
||||
socketio.emit('log_event', log_data, namespace='/chasemapper')
|
||||
# Deal with log records with no content.
|
||||
if record.msg:
|
||||
if 'socket.io' not in record.msg:
|
||||
# Convert log record into a dictionary
|
||||
log_data = {
|
||||
'level': record.levelname,
|
||||
'timestamp': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
|
||||
'msg': record.msg
|
||||
}
|
||||
# Emit to all socket.io clients
|
||||
socketio.emit('log_event', log_data, namespace='/chasemapper')
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -56,6 +56,12 @@ html, body, #map {
|
|||
width: 10em;
|
||||
}
|
||||
|
||||
.predictorModelValue {
|
||||
display: inline-block;
|
||||
margin-left: auto;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.timeToLanding {
|
||||
color:red;
|
||||
font-weight: bold;
|
||||
|
|
|
@ -37,7 +37,7 @@ function serverSettingsUpdate(data){
|
|||
// Accept a json blob of settings data from the client, and update our local store.
|
||||
chase_config = data;
|
||||
// Update a few fields based on this data.
|
||||
$("#predictorModel").html("<b>Current Model: </b>" + chase_config.pred_model);
|
||||
$("#predictorModelValue").text(chase_config.pred_model);
|
||||
$('#burstAlt').val(chase_config.pred_burst.toFixed(0));
|
||||
$('#descentRate').val(chase_config.pred_desc_rate.toFixed(1));
|
||||
$('#predUpdateRate').val(chase_config.pred_update_rate.toFixed(0));
|
||||
|
|
|
@ -99,6 +99,13 @@ function telemetryTableDialog(e, row){
|
|||
var _last_pos = balloon_positions[callsign].latest_data.position;
|
||||
$('#telemDialogPosition').html("<a href='geo:" + _last_pos[0].toFixed(5) + "," + _last_pos[1].toFixed(5) + "'>" + _last_pos[0].toFixed(5) + ", " + _last_pos[1].toFixed(5) + "</a>");
|
||||
|
||||
if(balloon_positions[callsign].pred_marker != null){
|
||||
var _pred_latlng = balloon_positions[callsign].pred_marker.getLatLng();
|
||||
$('#telemDialogPredPosition').html("<a href='geo:" + _pred_latlng.lat.toFixed(5) + "," + _pred_latlng.lng.toFixed(5) + "'>" + _pred_latlng.lat.toFixed(5) + ", " + _pred_latlng.lng.toFixed(5) + "</a>");
|
||||
|
||||
}else{
|
||||
$('#telemDialogPredPosition').html("Unknown");
|
||||
}
|
||||
|
||||
var _buttons = {
|
||||
"Follow": function() {
|
||||
|
|
|
@ -451,7 +451,7 @@
|
|||
// Predictor Functions
|
||||
socket.on('predictor_model_update', function(data){
|
||||
var _model_data = data.model;
|
||||
$("#predictorModel").html("<b>Current Model: </b>" + _model_data);
|
||||
$("#predictorModelValue").text(_model_data);
|
||||
});
|
||||
|
||||
socket.on('predictor_update', function(data){
|
||||
|
@ -611,6 +611,9 @@
|
|||
<div class="paramRow">
|
||||
<b>Current Position:</b> <div style='float:right;' id='telemDialogPosition'></div><br/>
|
||||
</div>
|
||||
<div class="paramRow">
|
||||
<b>Predicted Landing:</b> <div style='float:right;' id='telemDialogPredPosition'></div><br/>
|
||||
</div>
|
||||
<div class="paramRow">
|
||||
<b>Select Action:</b><br/>
|
||||
</div>
|
||||
|
@ -676,7 +679,7 @@
|
|||
</hr>
|
||||
<h3>Predictor</h3>
|
||||
<div class="paramRow" id="predictorModel">
|
||||
<b>Current Model: </b> Predictor Disabled
|
||||
<b>Current Model: </b> <div class="predictorModelValue" id="predictorModelValue">Disabled</div>
|
||||
</div>
|
||||
<div class="paramRow">
|
||||
<b>Download Model</b> <button type="button" class="paramSelector" id="downloadModel">Download</button>
|
||||
|
|
Ładowanie…
Reference in New Issue