Style fixes, changed some log levels.

pull/27/head
Mark Jessop 2021-01-16 15:18:27 +10:30
parent a2c082a169
commit 4fea3f36dd
14 changed files with 1287 additions and 1033 deletions

View file

@ -2,116 +2,134 @@
#
# Project Horus - Atmosphere / Descent Rate Modelling
#
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
import math
def getDensity(altitude):
'''
"""
Calculate the atmospheric density for a given altitude in metres.
This is a direct port of the oziplotter Atmosphere class
'''
"""
#Constants
airMolWeight = 28.9644 # Molecular weight of air
densitySL = 1.225 # Density at sea level [kg/m3]
pressureSL = 101325 # Pressure at sea level [Pa]
temperatureSL = 288.15 # Temperature at sea level [deg K]
gamma = 1.4
gravity = 9.80665 # Acceleration of gravity [m/s2]
tempGrad = -0.0065 # Temperature gradient [deg K/m]
RGas = 8.31432 # Gas constant [kg/Mol/K]
R = 287.053
deltaTemperature = 0.0
# Constants
airMolWeight = 28.9644 # Molecular weight of air
densitySL = 1.225 # Density at sea level [kg/m3]
pressureSL = 101325 # Pressure at sea level [Pa]
temperatureSL = 288.15 # Temperature at sea level [deg K]
gamma = 1.4
gravity = 9.80665 # Acceleration of gravity [m/s2]
tempGrad = -0.0065 # Temperature gradient [deg K/m]
RGas = 8.31432 # Gas constant [kg/Mol/K]
R = 287.053
deltaTemperature = 0.0
# Lookup Tables
altitudes = [0, 11000, 20000, 32000, 47000, 51000, 71000, 84852]
pressureRels = [1, 2.23361105092158e-1, 5.403295010784876e-2, 8.566678359291667e-3, 1.0945601337771144e-3, 6.606353132858367e-4, 3.904683373343926e-5, 3.6850095235747942e-6]
temperatures = [288.15, 216.65, 216.65, 228.65, 270.65, 270.65, 214.65, 186.946]
tempGrads = [-6.5, 0, 1, 2.8, 0, -2.8, -2, 0]
gMR = gravity * airMolWeight / RGas
# Lookup Tables
altitudes = [0, 11000, 20000, 32000, 47000, 51000, 71000, 84852]
pressureRels = [
1,
2.23361105092158e-1,
5.403295010784876e-2,
8.566678359291667e-3,
1.0945601337771144e-3,
6.606353132858367e-4,
3.904683373343926e-5,
3.6850095235747942e-6,
]
temperatures = [288.15, 216.65, 216.65, 228.65, 270.65, 270.65, 214.65, 186.946]
tempGrads = [-6.5, 0, 1, 2.8, 0, -2.8, -2, 0]
gMR = gravity * airMolWeight / RGas
# Pick a region to work in
i = 0
if(altitude > 0):
while (altitude > altitudes[i+1]):
i = i + 1
# Pick a region to work in
i = 0
if altitude > 0:
while altitude > altitudes[i + 1]:
i = i + 1
# Lookup based on region
baseTemp = temperatures[i]
tempGrad = tempGrads[i] / 1000.0
pressureRelBase = pressureRels[i]
deltaAltitude = altitude - altitudes[i]
temperature = baseTemp + tempGrad * deltaAltitude
# Lookup based on region
baseTemp = temperatures[i]
tempGrad = tempGrads[i] / 1000.0
pressureRelBase = pressureRels[i]
deltaAltitude = altitude - altitudes[i]
temperature = baseTemp + tempGrad * deltaAltitude
# Calculate relative pressure
if(math.fabs(tempGrad) < 1e-10):
pressureRel = pressureRelBase * math.exp(-1 *gMR * deltaAltitude / 1000.0 / baseTemp)
else:
pressureRel = pressureRelBase * math.pow(baseTemp / temperature, gMR / tempGrad / 1000.0)
# Calculate relative pressure
if math.fabs(tempGrad) < 1e-10:
pressureRel = pressureRelBase * math.exp(
-1 * gMR * deltaAltitude / 1000.0 / baseTemp
)
else:
pressureRel = pressureRelBase * math.pow(
baseTemp / temperature, gMR / tempGrad / 1000.0
)
# Add temperature offset
temperature = temperature + deltaTemperature
# Add temperature offset
temperature = temperature + deltaTemperature
# Finally, work out the density...
speedOfSound = math.sqrt(gamma * R * temperature)
pressure = pressureRel * pressureSL
density = densitySL * pressureRel * temperatureSL / temperature
# Finally, work out the density...
speedOfSound = math.sqrt(gamma * R * temperature)
pressure = pressureRel * pressureSL
density = densitySL * pressureRel * temperatureSL / temperature
return density
return density
def seaLevelDescentRate(descent_rate, altitude):
''' Calculate the descent rate at sea level, for a given descent rate at altitude '''
""" Calculate the descent rate at sea level, for a given descent rate at altitude """
rho = getDensity(altitude)
return math.sqrt((rho / 1.225) * math.pow(descent_rate, 2))
rho = getDensity(altitude)
return math.sqrt((rho / 1.225) * math.pow(descent_rate, 2))
def time_to_landing(
current_altitude, current_descent_rate=-5.0, ground_asl=0.0, step_size=1
):
""" Calculate an estimated time to landing (in seconds) of a payload, based on its current altitude and descent rate """
def time_to_landing(current_altitude, current_descent_rate=-5.0, ground_asl=0.0, step_size=1):
''' Calculate an estimated time to landing (in seconds) of a payload, based on its current altitude and descent rate '''
# A few checks on the input data.
if current_descent_rate > 0.0:
# If we are still ascending, return none.
return None
# A few checks on the input data.
if current_descent_rate > 0.0:
# If we are still ascending, return none.
return None
if current_altitude <= ground_asl:
# If the current altitude is *below* ground level, we have landed.
return 0
if current_altitude <= ground_asl:
# If the current altitude is *below* ground level, we have landed.
return 0
# Calculate the sea level descent rate.
_desc_rate = math.fabs(seaLevelDescentRate(current_descent_rate, current_altitude))
_drag_coeff = (
_desc_rate * 1.106797
) # Multiply descent rate by square root of sea-level air density (1.225).
# Calculate the sea level descent rate.
_desc_rate = math.fabs(seaLevelDescentRate(current_descent_rate, current_altitude))
_drag_coeff = _desc_rate*1.106797 # Multiply descent rate by square root of sea-level air density (1.225).
_alt = current_altitude
_start_time = 0
# Now step through the flight in <step_size> second steps.
# Once the altitude is below our ground level, stop, and return the elapsed time.
while _alt >= ground_asl:
_alt += step_size * -1 * (_drag_coeff / math.sqrt(getDensity(_alt)))
_start_time += step_size
return _start_time
_alt = current_altitude
_start_time = 0
# Now step through the flight in <step_size> second steps.
# Once the altitude is below our ground level, stop, and return the elapsed time.
while _alt >= ground_asl:
_alt += step_size * -1*(_drag_coeff/math.sqrt(getDensity(_alt)))
_start_time += step_size
if __name__ == "__main__":
# Test Cases
_altitudes = [1000, 10000, 30000, 1000, 10000, 30000]
_rates = [-10.0, -10.0, -10.0, -30.0, -30.0, -30.0]
return _start_time
if __name__ == '__main__':
# Test Cases
_altitudes = [1000, 10000, 30000, 1000, 10000, 30000]
_rates = [-10.0, -10.0, -10.0, -30.0, -30.0, -30.0]
for i in range(len(_altitudes)):
print("Altitude: %d m, Rate: %.2f m/s" % (_altitudes[i], _rates[i]))
print("Density: %.5f" % getDensity(_altitudes[i]))
print("Sea Level Descent Rate: %.2f m/s" % seaLevelDescentRate(_rates[i], _altitudes[i]))
_landing = time_to_landing(_altitudes[i],_rates[i])
_landing_min = _landing//60
_landing_sec = _landing%60
print("Time to landing: %d sec, %s:%s " % (_landing, _landing_min,_landing_sec))
print("")
for i in range(len(_altitudes)):
print("Altitude: %d m, Rate: %.2f m/s" % (_altitudes[i], _rates[i]))
print("Density: %.5f" % getDensity(_altitudes[i]))
print(
"Sea Level Descent Rate: %.2f m/s"
% seaLevelDescentRate(_rates[i], _altitudes[i])
)
_landing = time_to_landing(_altitudes[i], _rates[i])
_landing_min = _landing // 60
_landing_sec = _landing % 60
print(
"Time to landing: %d sec, %s:%s " % (_landing, _landing_min, _landing_sec)
)
print("")

View file

@ -5,7 +5,7 @@
# Copyright (C) 2019 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
#
#
# TODO:
# [ ] Store a rolling buffer of car positions, to enable fusing of 'old' bearings with previous car positions.
#
@ -17,21 +17,15 @@ from threading import Lock
class Bearings(object):
def __init__(self,
socketio_instance = None,
max_bearings = 300,
max_bearing_age = 30*60
):
def __init__(
self, socketio_instance=None, max_bearings=300, max_bearing_age=30 * 60
):
# Reference to the socketio instance which will be used to pass data onto web clients
self.sio = socketio_instance
self.max_bearings = max_bearings
self.max_age = max_bearing_age
# Bearing store
# Bearings are stored as a dict, with the key being the timestamp (time.time())
# when the bearing arrived in the system.
@ -52,23 +46,21 @@ class Bearings(object):
self.bearing_lock = Lock()
# Internal record of the chase car position, which is updated with incoming GPS data.
# If incoming bearings do not contain lat/lon information, we fuse them with this position,
# as long as it is valid.
self.current_car_position = {
'timestamp': None, # System timestamp from time.time()
'datetime': None, # Datetime object from data source.
'lat': 0.0,
'lon': 0.0,
'alt': 0.0,
'heading': 0.0,
'speed': 0.0,
'heading_valid': False,
'position_valid': False
"timestamp": None, # System timestamp from time.time()
"datetime": None, # Datetime object from data source.
"lat": 0.0,
"lon": 0.0,
"alt": 0.0,
"heading": 0.0,
"speed": 0.0,
"heading_valid": False,
"position_valid": False,
}
def update_car_position(self, position):
""" Accept a new car position, in the form of a dictionary produced by a GenericTrack object
(refer geometry.py). This is of the form:
@ -91,24 +83,24 @@ class Bearings(object):
# Attempt to build up new chase car position dict
try:
_car_pos = {
'timestamp': time.time(),
'datetime': position['time'],
'lat': position['lat'],
'lon': position['lon'],
'alt': position['alt'],
'heading': self.current_car_position['heading'],
'heading_valid': position['heading_valid'],
'speed': position['speed'],
'position_valid': True # Should we be taking this from upstream somewhere?
"timestamp": time.time(),
"datetime": position["time"],
"lat": position["lat"],
"lon": position["lon"],
"alt": position["alt"],
"heading": self.current_car_position["heading"],
"heading_valid": position["heading_valid"],
"speed": position["speed"],
"position_valid": True, # Should we be taking this from upstream somewhere?
}
# Only gate through the heading if it is valid.
if position['heading_valid']:
_car_pos['heading'] = position['heading']
if position["heading_valid"]:
_car_pos["heading"] = position["heading"]
# Mark position as invalid if we have zero lat/lon values
if (_car_pos['lat'] == 0.0) and (_car_pos['lon'] == 0.0):
_car_pos['position_valid'] = False
if (_car_pos["lat"] == 0.0) and (_car_pos["lon"] == 0.0):
_car_pos["position_valid"] = False
# Replace car position state with new data
self.current_car_position = _car_pos
@ -116,7 +108,6 @@ class Bearings(object):
except Exception as e:
logging.error("Bearing Handler - Invalid car position: %s" % str(e))
def add_bearing(self, bearing):
""" Add a bearing into the store, fusing incoming data with the latest car position as required.
@ -139,82 +130,77 @@ class Bearings(object):
"""
# Should never be passed a non-bearing dict, but check anyway,
if bearing['type'] != 'BEARING':
if bearing["type"] != "BEARING":
return
_arrival_time = time.time()
# Get a copy of the current car position, in case it is updated
_current_car_pos = self.current_car_position.copy()
if 'timestamp' in bearing:
_src_timestamp = bearing['timestamp']
if "timestamp" in bearing:
_src_timestamp = bearing["timestamp"]
else:
_src_timestamp = _arrival_time
if 'confidence' in bearing:
_confidence = bearing['confidence']
if "confidence" in bearing:
_confidence = bearing["confidence"]
else:
_confidence = 100.0
if 'power' in bearing:
_power = bearing['power']
if "power" in bearing:
_power = bearing["power"]
else:
_power = -1
if 'source' in bearing:
_source = bearing['source']
if "source" in bearing:
_source = bearing["source"]
else:
_source = 'unknown'
_source = "unknown"
try:
if bearing['bearing_type'] == 'relative':
if bearing["bearing_type"] == "relative":
# Relative bearing - we need to fuse this with the current car position.
# Temporary hack for KerberosSDR bearings, which are reflected across N/S
if _source == 'kerberos-sdr':
bearing['bearing'] = 360.0 - bearing['bearing']
bearing['raw_doa'] = bearing['raw_doa'][::-1]
if _source == "kerberos-sdr":
bearing["bearing"] = 360.0 - bearing["bearing"]
bearing["raw_doa"] = bearing["raw_doa"][::-1]
_new_bearing = {
'timestamp': _arrival_time,
'src_timestamp': _src_timestamp,
'lat': _current_car_pos['lat'],
'lon': _current_car_pos['lon'],
'speed': _current_car_pos['speed'],
'heading': _current_car_pos['heading'],
'heading_valid': _current_car_pos['heading_valid'],
'raw_bearing': bearing['bearing'],
'true_bearing': (bearing['bearing'] + _current_car_pos['heading']) % 360.0,
'confidence': _confidence,
'power': _power,
'source': _source
"timestamp": _arrival_time,
"src_timestamp": _src_timestamp,
"lat": _current_car_pos["lat"],
"lon": _current_car_pos["lon"],
"speed": _current_car_pos["speed"],
"heading": _current_car_pos["heading"],
"heading_valid": _current_car_pos["heading_valid"],
"raw_bearing": bearing["bearing"],
"true_bearing": (bearing["bearing"] + _current_car_pos["heading"])
% 360.0,
"confidence": _confidence,
"power": _power,
"source": _source,
}
elif bearing['bearing_type'] == 'absolute':
elif bearing["bearing_type"] == "absolute":
# Absolute bearing - use the provided data as-is
_new_bearing = {
'timestamp': _arrival_time,
'src_timestamp': _src_timestamp,
'lat': bearing['latitude'],
'lon': bearing['longitude'],
'speed': 0.0,
'heading': 0.0,
'heading_valid': True,
'raw_bearing': bearing['bearing'],
'true_bearing': bearing['bearing'],
'confidence': _confidence,
'power': _power,
'source': _source
"timestamp": _arrival_time,
"src_timestamp": _src_timestamp,
"lat": bearing["latitude"],
"lon": bearing["longitude"],
"speed": 0.0,
"heading": 0.0,
"heading_valid": True,
"raw_bearing": bearing["bearing"],
"true_bearing": bearing["bearing"],
"confidence": _confidence,
"power": _power,
"source": _source,
}
else:
return
@ -263,31 +249,21 @@ class Bearings(object):
self.bearing_lock.release()
# Add in any raw DOA data we may have been given.
if 'raw_bearing_angles' in bearing:
_new_bearing['raw_bearing_angles'] = bearing['raw_bearing_angles']
_new_bearing['raw_doa'] = bearing['raw_doa']
if "raw_bearing_angles" in bearing:
_new_bearing["raw_bearing_angles"] = bearing["raw_bearing_angles"]
_new_bearing["raw_doa"] = bearing["raw_doa"]
# Now we need to update the web clients on what has changed.
_client_update = {
'add': _new_bearing,
'remove': _removal_list,
'server_timestamp': time.time()
"add": _new_bearing,
"remove": _removal_list,
"server_timestamp": time.time(),
}
self.sio.emit('bearing_change', _client_update, namespace='/chasemapper')
self.sio.emit("bearing_change", _client_update, namespace="/chasemapper")
def flush(self):
""" Clear the bearing store """
self.bearing_lock.acquire()
self.bearings = {}
self.bearing_lock.release()
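
The core of add_bearing() above is the fusion of a receiver-relative bearing with the chase car's current heading (after the KerberosSDR N/S mirror is applied); absolute bearings are passed through untouched. A minimal restatement with illustrative names, not the Bearings class API:

def mirror_kerberos_bearing(raw_bearing_deg):
    # Work around the KerberosSDR reflection noted above.
    return 360.0 - raw_bearing_deg

def fuse_relative_bearing(raw_bearing_deg, car_heading_deg):
    # Rotate an antenna-relative bearing by the car heading to get a true bearing.
    return (raw_bearing_deg + car_heading_deg) % 360.0

# Example: the antenna reports 350 degrees relative while the car heads 030 true.
assert fuse_relative_bearing(350.0, 30.0) == 20.0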

View file

@ -18,198 +18,232 @@ except ImportError:
default_config = {
# Start location for the map (until either a chase car position, or balloon position is available.)
'default_lat': -34.9,
'default_lon': 138.6,
'payload_max_age': 180,
'thunderforest_api_key': 'none',
"default_lat": -34.9,
"default_lon": 138.6,
"payload_max_age": 180,
"thunderforest_api_key": "none",
# Predictor settings
'pred_enabled': True, # Enable running and display of predicted flight paths.
'offline_predictions': False, # Use an offline GFS model and predictor instead of Tawhiri.
"pred_enabled": True, # Enable running and display of predicted flight paths.
"offline_predictions": False, # Use an offline GFS model and predictor instead of Tawhiri.
# Default prediction settings (actual values will be used once the flight is underway)
'pred_model': "Disabled",
'pred_desc_rate': 6.0,
'pred_burst': 28000,
'show_abort': True, # Show a prediction of an 'abort' paths (i.e. if the balloon bursts *now*)
'pred_update_rate': 15, # Update predictor every 15 seconds.
"pred_model": "Disabled",
"pred_desc_rate": 6.0,
"pred_burst": 28000,
"show_abort": True, # Show a prediction of an 'abort' paths (i.e. if the balloon bursts *now*)
"pred_update_rate": 15, # Update predictor every 15 seconds.
# Range Rings
'range_rings_enabled': False,
'range_ring_quantity': 5,
'range_ring_spacing': 1000,
'range_ring_weight': 1.5,
'range_ring_color': 'red',
'range_ring_custom_color': '#FF0000',
# Chase Car Speedometer
'chase_car_speed': True,
"range_rings_enabled": False,
"range_ring_quantity": 5,
"range_ring_spacing": 1000,
"range_ring_weight": 1.5,
"range_ring_color": "red",
"range_ring_custom_color": "#FF0000",
# Chase Car Speedometer
"chase_car_speed": True,
# Bearing processing
'max_bearings': 300,
'max_bearing_age': 30*60,
'car_speed_gate': 10,
'bearing_length': 10,
'bearing_weight': 1.0,
'bearing_color': 'black',
'bearing_custom_color': '#FF0000',
"max_bearings": 300,
"max_bearing_age": 30 * 60,
"car_speed_gate": 10,
"bearing_length": 10,
"bearing_weight": 1.0,
"bearing_color": "black",
"bearing_custom_color": "#FF0000",
# History
'reload_last_position': False
}
"reload_last_position": False,
}
def parse_config_file(filename):
""" Parse a Configuration File """
""" Parse a Configuration File """
chase_config = default_config.copy()
chase_config = default_config.copy()
config = RawConfigParser()
config.read(filename)
config = RawConfigParser()
config.read(filename)
# Map Defaults
chase_config['flask_host'] = config.get('map', 'flask_host')
chase_config['flask_port'] = config.getint('map', 'flask_port')
chase_config['default_lat'] = config.get('map', 'default_lat')
chase_config['default_lon'] = config.get('map', 'default_lon')
chase_config['payload_max_age'] = config.getint('map', 'payload_max_age')
chase_config['thunderforest_api_key'] = config.get('map', 'thunderforest_api_key')
# Map Defaults
chase_config["flask_host"] = config.get("map", "flask_host")
chase_config["flask_port"] = config.getint("map", "flask_port")
chase_config["default_lat"] = config.get("map", "default_lat")
chase_config["default_lon"] = config.get("map", "default_lon")
chase_config["payload_max_age"] = config.getint("map", "payload_max_age")
chase_config["thunderforest_api_key"] = config.get("map", "thunderforest_api_key")
# GPSD Settings
chase_config["car_gpsd_host"] = config.get("gpsd", "gpsd_host")
chase_config["car_gpsd_port"] = config.getint("gpsd", "gpsd_port")
# GPSD Settings
chase_config['car_gpsd_host'] = config.get('gpsd','gpsd_host')
chase_config['car_gpsd_port'] = config.getint('gpsd','gpsd_port')
# Serial GPS Settings
chase_config["car_serial_port"] = config.get("gps_serial", "gps_port")
chase_config["car_serial_baud"] = config.getint("gps_serial", "gps_baud")
# Serial GPS Settings
chase_config['car_serial_port'] = config.get('gps_serial', 'gps_port')
chase_config['car_serial_baud'] = config.getint('gps_serial', 'gps_baud')
# Habitat Settings
chase_config["habitat_upload_enabled"] = config.getboolean(
"habitat", "habitat_upload_enabled"
)
chase_config["habitat_call"] = config.get("habitat", "habitat_call")
chase_config["habitat_update_rate"] = config.getint(
"habitat", "habitat_update_rate"
)
# Habitat Settings
chase_config['habitat_upload_enabled'] = config.getboolean('habitat', 'habitat_upload_enabled')
chase_config['habitat_call'] = config.get('habitat', 'habitat_call')
chase_config['habitat_update_rate'] = config.getint('habitat', 'habitat_update_rate')
# Predictor
chase_config["pred_enabled"] = config.getboolean("predictor", "predictor_enabled")
chase_config["offline_predictions"] = config.getboolean(
"predictor", "offline_predictions"
)
chase_config["pred_burst"] = config.getfloat("predictor", "default_burst")
chase_config["pred_desc_rate"] = config.getfloat(
"predictor", "default_descent_rate"
)
chase_config["pred_binary"] = config.get("predictor", "pred_binary")
chase_config["pred_gfs_directory"] = config.get("predictor", "gfs_directory")
chase_config["pred_model_download"] = config.get("predictor", "model_download")
# Predictor
chase_config['pred_enabled'] = config.getboolean('predictor', 'predictor_enabled')
chase_config['offline_predictions'] = config.getboolean('predictor', 'offline_predictions')
chase_config['pred_burst'] = config.getfloat('predictor', 'default_burst')
chase_config['pred_desc_rate'] = config.getfloat('predictor', 'default_descent_rate')
chase_config['pred_binary'] = config.get('predictor','pred_binary')
chase_config['pred_gfs_directory'] = config.get('predictor', 'gfs_directory')
chase_config['pred_model_download'] = config.get('predictor', 'model_download')
# Range Ring Settings
chase_config["range_rings_enabled"] = config.getboolean(
"range_rings", "range_rings_enabled"
)
chase_config["range_ring_quantity"] = config.getint(
"range_rings", "range_ring_quantity"
)
chase_config["range_ring_spacing"] = config.getint(
"range_rings", "range_ring_spacing"
)
chase_config["range_ring_weight"] = config.getfloat(
"range_rings", "range_ring_weight"
)
chase_config["range_ring_color"] = config.get("range_rings", "range_ring_color")
chase_config["range_ring_custom_color"] = config.get(
"range_rings", "range_ring_custom_color"
)
# Range Ring Settings
chase_config['range_rings_enabled'] = config.getboolean('range_rings', 'range_rings_enabled')
chase_config['range_ring_quantity'] = config.getint('range_rings', 'range_ring_quantity')
chase_config['range_ring_spacing'] = config.getint('range_rings', 'range_ring_spacing')
chase_config['range_ring_weight'] = config.getfloat('range_rings', 'range_ring_weight')
chase_config['range_ring_color'] = config.get('range_rings', 'range_ring_color')
chase_config['range_ring_custom_color'] = config.get('range_rings', 'range_ring_custom_color')
# Bearing Processing
chase_config["max_bearings"] = config.getint("bearings", "max_bearings")
chase_config["max_bearing_age"] = (
config.getint("bearings", "max_bearing_age") * 60
) # Convert to seconds
if chase_config["max_bearing_age"] < 60:
chase_config[
"max_bearing_age"
] = 60 # Make sure this number is something sane, otherwise things will break
chase_config["car_speed_gate"] = (
config.getfloat("bearings", "car_speed_gate") / 3.6
) # Convert to m/s
chase_config["bearing_length"] = config.getfloat("bearings", "bearing_length")
chase_config["bearing_weight"] = config.getfloat("bearings", "bearing_weight")
chase_config["bearing_color"] = config.get("bearings", "bearing_color")
chase_config["bearing_custom_color"] = config.get(
"bearings", "bearing_custom_color"
)
# Bearing Processing
chase_config['max_bearings'] = config.getint('bearings', 'max_bearings')
chase_config['max_bearing_age'] = config.getint('bearings', 'max_bearing_age')*60 # Convert to seconds
if chase_config['max_bearing_age'] < 60:
chase_config['max_bearing_age'] = 60 # Make sure this number is something sane, otherwise things will break
chase_config['car_speed_gate'] = config.getfloat('bearings', 'car_speed_gate')/3.6 # Convert to m/s
chase_config['bearing_length'] = config.getfloat('bearings', 'bearing_length')
chase_config['bearing_weight'] = config.getfloat('bearings', 'bearing_weight')
chase_config['bearing_color'] = config.get('bearings', 'bearing_color')
chase_config['bearing_custom_color'] = config.get('bearings', 'bearing_custom_color')
# Offline Map Settings
chase_config["tile_server_enabled"] = config.getboolean(
"offline_maps", "tile_server_enabled"
)
chase_config["tile_server_path"] = config.get("offline_maps", "tile_server_path")
# Offline Map Settings
chase_config['tile_server_enabled'] = config.getboolean('offline_maps', 'tile_server_enabled')
chase_config['tile_server_path'] = config.get('offline_maps', 'tile_server_path')
# Determine valid offline map layers.
chase_config["offline_tile_layers"] = []
if chase_config["tile_server_enabled"]:
for _dir in os.listdir(chase_config["tile_server_path"]):
if os.path.isdir(os.path.join(chase_config["tile_server_path"], _dir)):
chase_config["offline_tile_layers"].append(_dir)
logging.info("Found Map Layers: %s" % str(chase_config["offline_tile_layers"]))
# Determine valid offline map layers.
chase_config['offline_tile_layers'] = []
if chase_config['tile_server_enabled']:
for _dir in os.listdir(chase_config['tile_server_path']):
if os.path.isdir(os.path.join(chase_config['tile_server_path'],_dir)):
chase_config['offline_tile_layers'].append(_dir)
logging.info("Found Map Layers: %s" % str(chase_config['offline_tile_layers']))
try:
chase_config["chase_car_speed"] = config.getboolean("speedo", "chase_car_speed")
except:
logging.info("Missing Chase Car Speedo Setting, using default (disabled)")
chase_config["chase_car_speed"] = False
try:
chase_config['chase_car_speed'] = config.getboolean('speedo', 'chase_car_speed')
except:
logging.info("Missing Chase Car Speedo Setting, using default (disabled)")
chase_config['chase_car_speed'] = False
# Telemetry Source Profiles
# Telemetry Source Profiles
_profile_count = config.getint("profile_selection", "profile_count")
_default_profile = config.getint("profile_selection", "default_profile")
_profile_count = config.getint('profile_selection', 'profile_count')
_default_profile = config.getint('profile_selection', 'default_profile')
chase_config["selected_profile"] = ""
chase_config["profiles"] = {}
chase_config['selected_profile'] = ""
chase_config['profiles'] = {}
# Unit Selection
chase_config["unitselection"] = config.get(
"units", "unitselection", fallback="metric"
)
if chase_config["unitselection"] != "imperial":
chase_config[
"unitselection"
] = "metric" # unless imperial is explicitly requested do metric
chase_config["switch_miles_feet"] = config.get(
"units", "switch_miles_feet", fallback="400"
)
# Unit Selection
for i in range(1, _profile_count + 1):
_profile_section = "profile_%d" % i
try:
_profile_name = config.get(_profile_section, "profile_name")
_profile_telem_source_type = config.get(
_profile_section, "telemetry_source_type"
)
_profile_telem_source_port = config.getint(
_profile_section, "telemetry_source_port"
)
_profile_car_source_type = config.get(_profile_section, "car_source_type")
_profile_car_source_port = config.getint(
_profile_section, "car_source_port"
)
chase_config['unitselection'] = config.get('units', 'unitselection', fallback='metric')
if chase_config['unitselection'] != "imperial":
chase_config['unitselection'] = 'metric' #unless imperial is explicitly requested do metric
chase_config['switch_miles_feet'] = config.get('units', 'switch_miles_feet', fallback = '400')
chase_config["profiles"][_profile_name] = {
"name": _profile_name,
"telemetry_source_type": _profile_telem_source_type,
"telemetry_source_port": _profile_telem_source_port,
"car_source_type": _profile_car_source_type,
"car_source_port": _profile_car_source_port,
}
if _default_profile == i:
chase_config["selected_profile"] = _profile_name
for i in range(1,_profile_count+1):
_profile_section = "profile_%d" % i
try:
_profile_name = config.get(_profile_section, 'profile_name')
_profile_telem_source_type = config.get(_profile_section, 'telemetry_source_type')
_profile_telem_source_port = config.getint(_profile_section, 'telemetry_source_port')
_profile_car_source_type = config.get(_profile_section, 'car_source_type')
_profile_car_source_port = config.getint(_profile_section, 'car_source_port')
except Exception as e:
logging.error("Error reading profile section %d - %s" % (i, str(e)))
chase_config['profiles'][_profile_name] = {
'name': _profile_name,
'telemetry_source_type': _profile_telem_source_type,
'telemetry_source_port': _profile_telem_source_port,
'car_source_type': _profile_car_source_type,
'car_source_port': _profile_car_source_port
}
if _default_profile == i:
chase_config['selected_profile'] = _profile_name
if len(chase_config["profiles"].keys()) == 0:
logging.critical("Could not read any profile data!")
return None
except Exception as e:
logging.error("Error reading profile section %d - %s" % (i, str(e)))
if len(chase_config['profiles'].keys()) == 0:
logging.critical("Could not read any profile data!")
return None
if chase_config['selected_profile'] not in chase_config['profiles']:
logging.critical("Default profile selection does not exist.")
return None
if chase_config["selected_profile"] not in chase_config["profiles"]:
logging.critical("Default profile selection does not exist.")
return None
# History
chase_config['reload_last_position'] = config.getboolean('history', 'reload_last_position', fallback=False)
return chase_config
chase_config["reload_last_position"] = config.getboolean(
"history", "reload_last_position", fallback=False
)
return chase_config
def read_config(filename, default_cfg="horusmapper.cfg.example"):
""" Read in a Horus Mapper configuration file,and return as a dict. """
""" Read in a Horus Mapper configuration file,and return as a dict. """
try:
config_dict = parse_config_file(filename)
except Exception as e:
logging.error("Could not parse %s, trying default: %s" % (filename, str(e)))
try:
config_dict = parse_config_file(default_cfg)
except Exception as e:
logging.critical("Could not parse example config file! - %s" % str(e))
config_dict = None
try:
config_dict = parse_config_file(filename)
except Exception as e:
logging.error("Could not parse %s, trying default: %s" % (filename, str(e)))
try:
config_dict = parse_config_file(default_cfg)
except Exception as e:
logging.critical("Could not parse example config file! - %s" % str(e))
config_dict = None
return config_dict
return config_dict
if __name__ == "__main__":
import sys
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', stream=sys.stdout, level=logging.DEBUG)
print(read_config(sys.argv[1]))
import sys
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s",
stream=sys.stdout,
level=logging.DEBUG,
)
print(read_config(sys.argv[1]))
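
parse_config_file() above follows one pattern throughout: copy the defaults dict, overlay values read with RawConfigParser, and convert units on the way in (bearing age from minutes to seconds, car speed gate from km/h to m/s). A trimmed sketch of that pattern, assuming Python 3's configparser and the same [bearings] option names:

from configparser import RawConfigParser

BEARING_DEFAULTS = {"max_bearings": 300, "max_bearing_age": 30 * 60, "car_speed_gate": 10 / 3.6}

def load_bearing_settings(filename):
    cfg = BEARING_DEFAULTS.copy()
    parser = RawConfigParser()
    parser.read(filename)
    cfg["max_bearings"] = parser.getint("bearings", "max_bearings")
    # Minutes in the file, seconds internally; clamp to a sane minimum.
    cfg["max_bearing_age"] = max(parser.getint("bearings", "max_bearing_age") * 60, 60)
    # km/h in the file, m/s internally.
    cfg["car_speed_gate"] = parser.getfloat("bearings", "car_speed_gate") / 3.6
    return cfg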

View file

@ -9,6 +9,7 @@
from math import radians, degrees, sin, cos, atan2, sqrt, pi
def position_info(listener, balloon):
"""
Calculate and return information from 2 (lat, lon, alt) tuples
@ -28,8 +29,8 @@ def position_info(listener, balloon):
"""
# Earth:
#radius = 6371000.0
radius = 6364963.0 # Optimized for Australia :-)
# radius = 6371000.0
radius = 6364963.0 # Optimized for Australia :-)
(lat1, lon1, alt1) = listener
(lat2, lon2, alt2) = balloon
@ -79,7 +80,8 @@ def position_info(listener, balloon):
bearing += 2 * pi
return {
"listener": listener, "balloon": balloon,
"listener": listener,
"balloon": balloon,
"listener_radians": (lat1, lon1, alt1),
"balloon_radians": (lat2, lon2, alt2),
"angle_at_centre": degrees(angle_at_centre),
@ -89,7 +91,7 @@ def position_info(listener, balloon):
"great_circle_distance": great_circle_distance,
"straight_distance": distance,
"elevation": degrees(elevation),
"elevation_radians": elevation
"elevation_radians": elevation,
}
@ -134,4 +136,4 @@ def bearing_to_cardinal(bearing):
else:
cardinal = "?"
return cardinal
return cardinal
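
position_info() above carries the spherical-trigonometry workload (great-circle distance, bearing, elevation) between the listener and the balloon. The sketch below isolates just the bearing and haversine ground-distance parts, reusing the same Australia-optimised Earth radius; it is an illustration, not the module's full calculation:

from math import radians, degrees, sin, cos, atan2, sqrt

def bearing_and_distance(listener, balloon, radius=6364963.0):
    # listener/balloon are (lat, lon) pairs in degrees; altitude is ignored here.
    lat1, lon1 = map(radians, listener)
    lat2, lon2 = map(radians, balloon)
    d_lon = lon2 - lon1
    # Initial great-circle bearing, normalised to 0-360 degrees.
    y = sin(d_lon) * cos(lat2)
    x = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(d_lon)
    bearing = (degrees(atan2(y, x)) + 360.0) % 360.0
    # Haversine ground distance in metres.
    a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin(d_lon / 2) ** 2
    distance = 2 * radius * atan2(sqrt(a), sqrt(1 - a))
    return bearing, distance

print(bearing_and_distance((-34.9, 138.6), (-34.5, 139.0)))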

View file

@ -21,11 +21,10 @@ class GenericTrack(object):
The track history can be exported to a LineString using the to_line_string method.
"""
def __init__(self,
ascent_averaging = 6,
landing_rate = 5.0,
heading_gate_threshold = 0.0):
''' Create a GenericTrack Object. '''
def __init__(
self, ascent_averaging=6, landing_rate=5.0, heading_gate_threshold=0.0
):
""" Create a GenericTrack Object. """
# Averaging rate.
self.ASCENT_AVERAGING = ascent_averaging
@ -44,20 +43,19 @@ class GenericTrack(object):
# Data is stored as a list-of-lists, with elements of [datetime, lat, lon, alt, comment]
self.track_history = []
def add_telemetry(self,data_dict):
'''
def add_telemetry(self, data_dict):
"""
Accept telemetry data as a dictionary with fields
datetime, lat, lon, alt, comment
'''
"""
try:
_datetime = data_dict['time']
_lat = data_dict['lat']
_lon = data_dict['lon']
_alt = data_dict['alt']
if 'comment' in data_dict.keys():
_comment = data_dict['comment']
_datetime = data_dict["time"]
_lat = data_dict["lat"]
_lon = data_dict["lon"]
_alt = data_dict["alt"]
if "comment" in data_dict.keys():
_comment = data_dict["comment"]
else:
_comment = ""
@ -65,103 +63,116 @@ class GenericTrack(object):
self.update_states()
# If we have been supplied a 'true' heading with the position, override the state to use that.
if 'heading' in data_dict:
self.heading = data_dict['heading']
if "heading" in data_dict:
self.heading = data_dict["heading"]
self.heading_valid = True
return self.get_latest_state()
except:
logging.error("Error reading input data: %s" % traceback.format_exc())
def get_latest_state(self):
''' Get the latest position of the payload '''
""" Get the latest position of the payload """
if len(self.track_history) == 0:
return None
else:
_latest_position = self.track_history[-1]
_state = {
'time' : _latest_position[0],
'lat' : _latest_position[1],
'lon' : _latest_position[2],
'alt' : _latest_position[3],
'ascent_rate': self.ascent_rate,
'is_descending': self.is_descending,
'landing_rate': self.landing_rate,
'heading': self.heading,
'heading_valid': self.heading_valid,
'speed': self.speed
"time": _latest_position[0],
"lat": _latest_position[1],
"lon": _latest_position[2],
"alt": _latest_position[3],
"ascent_rate": self.ascent_rate,
"is_descending": self.is_descending,
"landing_rate": self.landing_rate,
"heading": self.heading,
"heading_valid": self.heading_valid,
"speed": self.speed,
}
return _state
def calculate_ascent_rate(self):
''' Calculate the ascent/descent rate of the payload based on the available data '''
""" Calculate the ascent/descent rate of the payload based on the available data """
if len(self.track_history) <= 1:
return 0.0
elif len(self.track_history) == 2:
# Basic ascent rate case - only 2 samples.
_time_delta = (self.track_history[-1][0] - self.track_history[-2][0]).total_seconds()
_time_delta = (
self.track_history[-1][0] - self.track_history[-2][0]
).total_seconds()
_altitude_delta = self.track_history[-1][3] - self.track_history[-2][3]
if _time_delta == 0:
logging.warning("Zero time-step encountered in ascent rate calculation - are multiple receivers reporting telemetry simultaneously?")
logging.warning(
"Zero time-step encountered in ascent rate calculation - are multiple receivers reporting telemetry simultaneously?"
)
return 0.0
else:
return _altitude_delta/_time_delta
return _altitude_delta / _time_delta
else:
_num_samples = min(len(self.track_history), self.ASCENT_AVERAGING)
_asc_rates = []
for _i in range(-1*(_num_samples-1), 0):
_time_delta = (self.track_history[_i][0] - self.track_history[_i-1][0]).total_seconds()
_altitude_delta = self.track_history[_i][3] - self.track_history[_i-1][3]
for _i in range(-1 * (_num_samples - 1), 0):
_time_delta = (
self.track_history[_i][0] - self.track_history[_i - 1][0]
).total_seconds()
_altitude_delta = (
self.track_history[_i][3] - self.track_history[_i - 1][3]
)
try:
_asc_rates.append(_altitude_delta/_time_delta)
_asc_rates.append(_altitude_delta / _time_delta)
except ZeroDivisionError:
logging.warning("Zero time-step encountered in ascent rate calculation - are multiple receivers reporting telemetry simultaneously?")
logging.warning(
"Zero time-step encountered in ascent rate calculation - are multiple receivers reporting telemetry simultaneously?"
)
continue
return np.mean(_asc_rates)
def calculate_heading(self):
''' Calculate the heading of the payload '''
""" Calculate the heading of the payload """
if len(self.track_history) <= 1:
return 0.0
else:
_pos_1 = self.track_history[-2]
_pos_2 = self.track_history[-1]
_pos_info = position_info((_pos_1[1],_pos_1[2],_pos_1[3]), (_pos_2[1],_pos_2[2],_pos_2[3]))
_pos_info = position_info(
(_pos_1[1], _pos_1[2], _pos_1[3]), (_pos_2[1], _pos_2[2], _pos_2[3])
)
return _pos_info['bearing']
return _pos_info["bearing"]
def calculate_speed(self):
""" Calculate Payload Speed in metres per second """
if len(self.track_history)<=1:
if len(self.track_history) <= 1:
return 0.0
else:
_time_delta = (self.track_history[-1][0] - self.track_history[-2][0]).total_seconds()
_time_delta = (
self.track_history[-1][0] - self.track_history[-2][0]
).total_seconds()
_pos_1 = self.track_history[-2]
_pos_2 = self.track_history[-1]
_pos_info = position_info((_pos_1[1],_pos_1[2],_pos_1[3]), (_pos_2[1],_pos_2[2],_pos_2[3]))
_pos_info = position_info(
(_pos_1[1], _pos_1[2], _pos_1[3]), (_pos_2[1], _pos_2[2], _pos_2[3])
)
try:
_speed = _pos_info['great_circle_distance']/_time_delta
_speed = _pos_info["great_circle_distance"] / _time_delta
except ZeroDivisionError:
logging.warning("Zero time-step encountered in speed calculation - are multiple receivers reporting telemetry simultaneously?")
logging.warning(
"Zero time-step encountered in speed calculation - are multiple receivers reporting telemetry simultaneously?"
)
return 0.0
return _speed
def update_states(self):
''' Update internal states based on the current data '''
""" Update internal states based on the current data """
self.ascent_rate = self.calculate_ascent_rate()
self.speed = self.calculate_speed()
self.heading = self.calculate_heading()
@ -177,9 +188,8 @@ class GenericTrack(object):
_current_alt = self.track_history[-1][3]
self.landing_rate = seaLevelDescentRate(self.ascent_rate, _current_alt)
def to_polyline(self):
''' Generate and return a Leaflet PolyLine compatible array '''
""" Generate and return a Leaflet PolyLine compatible array """
# Copy array into a numpy representation for easier slicing.
if len(self.track_history) == 0:
return []
@ -190,7 +200,9 @@ class GenericTrack(object):
else:
_track_data_np = np.array(self.track_history)
# Produce new array
_track_points = np.column_stack((_track_data_np[:,1], _track_data_np[:,2], _track_data_np[:,3]))
_track_points = np.column_stack(
(_track_data_np[:, 1], _track_data_np[:, 2], _track_data_np[:, 3])
)
return _track_points.tolist()
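
calculate_ascent_rate() above averages the pairwise rates over the last ASCENT_AVERAGING samples and skips zero time-steps, which occur when several receivers report the same telemetry frame simultaneously. A self-contained restatement with illustrative names:

from datetime import datetime, timedelta

def average_ascent_rate(samples, window=6):
    # samples: list of (datetime, altitude_m) tuples, oldest first.
    recent = samples[-window:]
    rates = []
    for (t0, a0), (t1, a1) in zip(recent, recent[1:]):
        dt = (t1 - t0).total_seconds()
        if dt == 0:
            continue  # duplicate timestamps from multiple receivers are skipped
        rates.append((a1 - a0) / dt)
    return sum(rates) / len(rates) if rates else 0.0

start = datetime(2021, 1, 16, 4, 0, 0)
track = [(start + timedelta(seconds=10 * i), 1000.0 + 50.0 * i) for i in range(8)]
print(average_ascent_rate(track))  # ~5.0 m/s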

View file

@ -13,19 +13,22 @@ import traceback
from datetime import datetime
from threading import Thread
class SerialGPS(object):
'''
Read NMEA strings from a serial-connected GPS receiver
'''
def __init__(self,
serial_port = '/dev/ttyUSB0',
serial_baud = 9600,
timeout = 5,
callback = None,
uberdebug = False,
unittest = False):
'''
class SerialGPS(object):
"""
Read NMEA strings from a serial-connected GPS receiver
"""
def __init__(
self,
serial_port="/dev/ttyUSB0",
serial_baud=9600,
timeout=5,
callback=None,
uberdebug=False,
unittest=False,
):
"""
Initialise a SerialGPS object.
This class assumes the serial-connected GPS outputs GPRMC and GPGGA NMEA strings
@ -46,7 +49,7 @@ class SerialGPS(object):
'speed': speed*3.6, # Convert speed to kph.
'valid': position_valid
}
'''
"""
self.serial_port = serial_port
self.serial_baud = serial_baud
@ -58,12 +61,12 @@ class SerialGPS(object):
# 'Chase Car Position' message.
# Note that these packets do not contain a timestamp.
self.gps_state = {
'type': 'GPS',
'latitude': 0.0,
'longitude': 0.0,
'altitude': 0.0,
'speed': 0.0,
'valid': False
"type": "GPS",
"latitude": 0.0,
"longitude": 0.0,
"altitude": 0.0,
"speed": 0.0,
"valid": False,
}
self.serial_thread_running = False
@ -73,9 +76,8 @@ class SerialGPS(object):
if not unittest:
self.start()
def start(self):
''' Start the GPS thread '''
""" Start the GPS thread """
if self.serial_thread != None:
return
else:
@ -83,20 +85,18 @@ class SerialGPS(object):
self.serial_thread = Thread(target=self.gps_thread)
self.serial_thread.start()
def close(self):
''' Stop the GPS thread. '''
""" Stop the GPS thread. """
self.serial_thread_running = False
# Wait for the thread to close.
if self.serial_thread != None:
self.serial_thread.join()
def gps_thread(self):
'''
"""
Attempt to connect to a serial port and read lines of text from it.
Pass all lines on to the NMEA parser function.
'''
"""
try:
import serial
@ -104,18 +104,25 @@ class SerialGPS(object):
logging.critical("Could not import pyserial library!")
return
while self.serial_thread_running:
# Attempt to connect to the serial port.
while self.ser == None and self.serial_thread_running:
try:
self.ser = serial.Serial(port=self.serial_port,baudrate=self.serial_baud,timeout=self.timeout)
logging.info("SerialGPS - Connected to serial port %s" % self.serial_port)
self.ser = serial.Serial(
port=self.serial_port,
baudrate=self.serial_baud,
timeout=self.timeout,
)
logging.info(
"SerialGPS - Connected to serial port %s" % self.serial_port
)
except Exception as e:
# Continue re-trying until we can connect to the serial port.
# This should let the user connect the gps *after* this object is instantiated, if required.
logging.error("SerialGPS - Serial Port Error: %s" % e)
logging.error("SerialGPS - Sleeping 10s before attempting re-connect.")
logging.error(
"SerialGPS - Sleeping 10s before attempting re-connect."
)
time.sleep(10)
self.ser = None
continue
@ -125,15 +132,19 @@ class SerialGPS(object):
data = self.ser.readline()
except:
# If we hit a serial read error, attempt to reconnect.
logging.error("SerialGPS - Error reading from serial device! Attempting to reconnect.")
logging.error(
"SerialGPS - Error reading from serial device! Attempting to reconnect."
)
self.ser = None
continue
# Attempt to parse data.
try:
self.parse_nmea(data.decode('ascii'))
self.parse_nmea(data.decode("ascii"))
except ValueError:
logging.debug("SerialGPS - ValueError when attempting to parse data. GPS may not have lock")
logging.debug(
"SerialGPS - ValueError when attempting to parse data. GPS may not have lock"
)
except:
traceback.print_exc()
pass
@ -145,28 +156,26 @@ class SerialGPS(object):
pass
logging.info("SerialGPS - Closing Thread.")
def dm_to_sd(self, dm):
'''
"""
Converts a geographic coordinate given in "degrees/minutes" dddmm.mmmm
format (ie, "12319.943281" = 123 degrees, 19.943281 minutes) to a signed
decimal (python float) format.
Courtesy of https://github.com/Knio/pynmea2/
'''
"""
# '12319.943281'
if not dm or dm == '0':
return 0.
if not dm or dm == "0":
return 0.0
d, m = re.match(r'^(\d+)(\d\d\.\d+)$', dm).groups()
d, m = re.match(r"^(\d+)(\d\d\.\d+)$", dm).groups()
return float(d) + float(m) / 60
def parse_nmea(self, data):
'''
"""
Attempt to parse a line of NMEA data.
If we have received a GPGGA string containing a position valid flag,
send the data on to the callback function.
'''
"""
if self.uberdebug:
print(data.strip())
@ -180,16 +189,16 @@ class SerialGPS(object):
gprmc_speed = float(gprmc[7])
if gprmc_latns == "S":
self.gps_state['latitude'] = gprmc_lat*-1.0
self.gps_state["latitude"] = gprmc_lat * -1.0
else:
self.gps_state['latitude'] = gprmc_lat
self.gps_state["latitude"] = gprmc_lat
if gprmc_lonew == "W":
self.gps_state['longitude'] = gprmc_lon*-1.0
self.gps_state["longitude"] = gprmc_lon * -1.0
else:
self.gps_state['longitude'] = gprmc_lon
self.gps_state["longitude"] = gprmc_lon
self.gps_state['speed'] = gprmc_speed*0.51444*3.6
self.gps_state["speed"] = gprmc_speed * 0.51444 * 3.6
elif "$GPGGA" in data:
logging.debug("SerialGPS - Got GPGGA.")
@ -199,35 +208,33 @@ class SerialGPS(object):
gpgga_lon = self.dm_to_sd(gpgga[4])
gpgga_lonew = gpgga[5]
gpgga_fixstatus = gpgga[6]
self.gps_state['altitude'] = float(gpgga[9])
self.gps_state["altitude"] = float(gpgga[9])
if gpgga_latns == "S":
self.gps_state['latitude'] = gpgga_lat*-1.0
self.gps_state["latitude"] = gpgga_lat * -1.0
else:
self.gps_state['latitude'] = gpgga_lat
self.gps_state["latitude"] = gpgga_lat
if gpgga_lonew == "W":
self.gps_state['longitude'] = gpgga_lon*-1.0
self.gps_state["longitude"] = gpgga_lon * -1.0
else:
self.gps_state['longitude'] = gpgga_lon
self.gps_state["longitude"] = gpgga_lon
if gpgga_fixstatus == 0:
self.gps_state['valid'] = False
self.gps_state["valid"] = False
else:
self.gps_state['valid'] = True
self.gps_state["valid"] = True
self.send_to_callback()
else:
# Discard all other lines
pass
def send_to_callback(self):
'''
"""
Send the current GPS data snapshot onto the callback function,
if one exists.
'''
"""
# Generate a copy of the gps state
_state = self.gps_state.copy()
@ -237,21 +244,20 @@ class SerialGPS(object):
self.callback(_state)
except Exception as e:
traceback.print_exc()
logging.error("SerialGPS - Error Passing data to callback - %s" % str(e))
logging.error(
"SerialGPS - Error Passing data to callback - %s" % str(e)
)
class GPSDGPS(object):
''' Read GPS data from a GPSD server '''
""" Read GPS data from a GPSD server """
def __init__(self,
hostname = '127.0.0.1',
port = 2947,
callback = None):
''' Init '''
def __init__(self, hostname="127.0.0.1", port=2947, callback=None):
""" Init """
pass
if __name__ == '__main__':
if __name__ == "__main__":
#
# GPS Parser Test Script
# Call with either:
@ -260,22 +266,31 @@ if __name__ == '__main__':
# $ python -m chasemapper.gps /path/to/nmea_log.txt
#
import sys, time
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
_port = sys.argv[1]
_baud = 9600
def print_data(data):
print(data)
if 'tty' not in _port:
if "tty" not in _port:
unittest = True
else:
unittest = False
_gps = SerialGPS(serial_port=_port, serial_baud=_baud, callback=print_data, uberdebug=True, unittest=unittest)
_gps = SerialGPS(
serial_port=_port,
serial_baud=_baud,
callback=print_data,
uberdebug=True,
unittest=unittest,
)
if unittest:
_f = open(_port, 'r')
_f = open(_port, "r")
for line in _f:
_gps.parse_nmea(line)
time.sleep(0.2)
@ -283,4 +298,3 @@ if __name__ == '__main__':
else:
time.sleep(100)
_gps.close()
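
dm_to_sd() above undoes NMEA's packed ddmm.mmmm coordinate format; the hemisphere letter from the GPRMC/GPGGA sentence then supplies the sign. A standalone version of that conversion (the helper name is illustrative):

import re

def nmea_dm_to_decimal(dm, hemisphere=""):
    # Convert an NMEA ddmm.mmmm / dddmm.mmmm string to signed decimal degrees.
    if not dm or dm == "0":
        return 0.0
    d, m = re.match(r"^(\d+)(\d\d\.\d+)$", dm).groups()
    value = float(d) + float(m) / 60.0
    return -value if hemisphere in ("S", "W") else value

print(nmea_dm_to_decimal("12319.943281", "E"))  # ~123.332 degrees East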

View file

@ -39,9 +39,9 @@ import traceback
from threading import Thread
GPSD_HOST = '127.0.0.1' # gpsd
GPSD_HOST = "127.0.0.1" # gpsd
GPSD_PORT = 2947 # defaults
GPSD_PROTOCOL = 'json' # "
GPSD_PROTOCOL = "json" # "
class GPSDSocket(object):
@ -80,14 +80,20 @@ class GPSDSocket(object):
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
if (
gpsd_protocol == "rare"
): # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
if (
gpsd_protocol == "raw"
): # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false .
command = command.replace(
"true", "false"
) # sets -all- command values false .
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
command = command.replace("}", ',"device":"') + devicepath + '"}'
return self.send(command)
@ -99,7 +105,7 @@ class GPSDSocket(object):
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
self.streamSock.send(bytes(command, encoding="utf-8"))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
@ -118,15 +124,22 @@ class GPSDSocket(object):
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return None
waitin, _waitout, _waiterror = select.select(
(self.streamSock,), (), (), timeout
)
if not waitin:
return None
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
gpsd_response = (
self.streamSock.makefile()
) # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
logging.error("GPSD - The readline exception in GPSDSocket.next is %s" % str(error))
logging.error(
"GPSD - The readline exception in GPSDSocket.next is %s" % str(error)
)
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
@ -142,29 +155,103 @@ class DataStream(object):
"""Retrieve JSON Object(s) from GPSDSocket and unpack it into respective
gpsd 'class' dictionaries, TPV, SKY, etc. yielding hours of fun and entertainment.
"""
packages = {
'VERSION': {'release', 'proto_major', 'proto_minor', 'remote', 'rev'},
'TPV': {'alt', 'climb', 'device', 'epc', 'epd', 'eps', 'ept', 'epv', 'epx', 'epy', 'lat', 'lon', 'mode', 'speed', 'tag', 'time', 'track'},
'SKY': {'satellites', 'gdop', 'hdop', 'pdop', 'tdop', 'vdop', 'xdop', 'ydop'},
"VERSION": {"release", "proto_major", "proto_minor", "remote", "rev"},
"TPV": {
"alt",
"climb",
"device",
"epc",
"epd",
"eps",
"ept",
"epv",
"epx",
"epy",
"lat",
"lon",
"mode",
"speed",
"tag",
"time",
"track",
},
"SKY": {"satellites", "gdop", "hdop", "pdop", "tdop", "vdop", "xdop", "ydop"},
# Subset of SKY: 'satellites': {'PRN', 'ss', 'el', 'az', 'used'} # is always present.
'GST': {'alt', 'device', 'lat', 'lon', 'major', 'minor', 'orient', 'rms', 'time'},
'ATT': {'acc_len', 'acc_x', 'acc_y', 'acc_z', 'depth', 'device', 'dip', 'gyro_x', 'gyro_y', 'heading', 'mag_len', 'mag_st', 'mag_x',
'mag_y', 'mag_z', 'pitch', 'pitch_st', 'roll', 'roll_st', 'temperature', 'time', 'yaw', 'yaw_st'},
"GST": {
"alt",
"device",
"lat",
"lon",
"major",
"minor",
"orient",
"rms",
"time",
},
"ATT": {
"acc_len",
"acc_x",
"acc_y",
"acc_z",
"depth",
"device",
"dip",
"gyro_x",
"gyro_y",
"heading",
"mag_len",
"mag_st",
"mag_x",
"mag_y",
"mag_z",
"pitch",
"pitch_st",
"roll",
"roll_st",
"temperature",
"time",
"yaw",
"yaw_st",
},
# 'POLL': {'active', 'tpv', 'sky', 'time'},
'PPS': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec', 'precision'},
'TOFF': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec'},
'DEVICES': {'devices', 'remote'},
'DEVICE': {'activated', 'bps', 'cycle', 'mincycle', 'driver', 'flags', 'native', 'parity', 'path', 'stopbits', 'subtype'},
"PPS": {
"device",
"clock_sec",
"clock_nsec",
"real_sec",
"real_nsec",
"precision",
},
"TOFF": {"device", "clock_sec", "clock_nsec", "real_sec", "real_nsec"},
"DEVICES": {"devices", "remote"},
"DEVICE": {
"activated",
"bps",
"cycle",
"mincycle",
"driver",
"flags",
"native",
"parity",
"path",
"stopbits",
"subtype",
},
# 'AIS': {} # see: http://catb.org/gpsd/AIVDM.html
'ERROR': {'message'}} # TODO: Full suite of possible GPSD output
"ERROR": {"message"},
} # TODO: Full suite of possible GPSD output
def __init__(self):
"""Potential data packages from gpsd for a generator of class attribute dictionaries"""
for package_name, dataset in self.packages.items():
_emptydict = {key: 'n/a' for key in dataset}
_emptydict = {key: "n/a" for key in dataset}
setattr(self, package_name, _emptydict)
self.DEVICES['devices'] = {key: 'n/a' for key in self.packages['DEVICE']} # How does multiple listed devices work?
self.DEVICES["devices"] = {
key: "n/a" for key in self.packages["DEVICE"]
} # How does multiple listed devices work?
# self.POLL = {'tpv': self.TPV, 'sky': self.SKY, 'time': 'n/a', 'active': 'n/a'}
def unpack(self, gpsd_socket_response):
@ -179,14 +266,24 @@ class DataStream(object):
applies to a lot of things.
"""
try:
fresh_data = json.loads(gpsd_socket_response) # The reserved word 'class' is popped from JSON object class
package_name = fresh_data.pop('class', 'ERROR') # gpsd data package errors are also 'ERROR'.
package = getattr(self, package_name, package_name) # packages are named for JSON object class
fresh_data = json.loads(
gpsd_socket_response
) # The reserved word 'class' is popped from JSON object class
package_name = fresh_data.pop(
"class", "ERROR"
) # gpsd data package errors are also 'ERROR'.
package = getattr(
self, package_name, package_name
) # packages are named for JSON object class
for key in package.keys():
package[key] = fresh_data.get(key, 'n/a') # Restores 'n/a' if key is absent in the socket response
package[key] = fresh_data.get(
key, "n/a"
) # Restores 'n/a' if key is absent in the socket response
except AttributeError: # 'str' object has no attribute 'keys'
logging.error("GPSD Parser - There is an unexpected exception in DataStream.unpack.")
logging.error(
"GPSD Parser - There is an unexpected exception in DataStream.unpack."
)
return
except (ValueError, KeyError) as error:
@ -195,13 +292,10 @@ class DataStream(object):
class GPSDAdaptor(object):
''' Connect to a GPSD instance, and pass data onto a callback function '''
""" Connect to a GPSD instance, and pass data onto a callback function """
def __init__(self,
hostname = '127.0.0.1',
port = 2947,
callback = None):
'''
def __init__(self, hostname="127.0.0.1", port=2947, callback=None):
"""
Initialize a GPSAdaptor object.
This class uses the GPSDSocket class to connect to a GPSD instance,
@ -211,20 +305,18 @@ class GPSDAdaptor(object):
hostname (str): Hostname of where GPSD is listening.
port (int): GPSD listen port (default = 2947)
callback (function): Callback to pass appropriately formatted dictionary data to.
'''
"""
self.hostname = hostname
self.port = port
self.callback = callback
self.gpsd_thread_running = False
self.gpsd_thread = None
self.start()
def start(self):
''' Start the GPSD thread '''
""" Start the GPSD thread """
if self.gpsd_thread != None:
return
else:
@ -232,21 +324,18 @@ class GPSDAdaptor(object):
self.gpsd_thread = Thread(target=self.gpsd_process_thread)
self.gpsd_thread.start()
def close(self):
''' Stop the GPSD thread. '''
""" Stop the GPSD thread. """
self.gpsd_thread_running = False
# Wait for the thread to close.
if self.gpsd_thread != None:
self.gpsd_thread.join()
def send_to_callback(self, data):
'''
"""
Send the current GPS data snapshot onto the callback function,
if one exists.
'''
"""
# Attempt to pass it onto the callback function.
if self.callback != None:
@ -256,24 +345,25 @@ class GPSDAdaptor(object):
traceback.print_exc()
logging.error("GPSD - Error Passing data to callback - %s" % str(e))
def gpsd_process_thread(self):
''' Attempt to connect to a GPSD instance, and read position information '''
""" Attempt to connect to a GPSD instance, and read position information """
while self.gpsd_thread_running:
# Attempt to connect.
_gpsd_socket = GPSDSocket()
_data_stream = DataStream()
_success = _gpsd_socket.connect(host = self.hostname, port = self.port)
_success = _gpsd_socket.connect(host=self.hostname, port=self.port)
# If we could not connect, wait and try again.
if not _success:
logging.error("GPSD - Connect failed. Waiting 10 seconds before re-trying.")
logging.error(
"GPSD - Connect failed. Waiting 10 seconds before re-trying."
)
time.sleep(10)
continue
# Start watching for data.
_gpsd_socket.watch(gpsd_protocol = 'json')
_gpsd_socket.watch(gpsd_protocol="json")
logging.info("GPSD - Connected to GPSD instance at %s" % self.hostname)
while self.gpsd_thread_running:
@ -281,8 +371,7 @@ class GPSDAdaptor(object):
# If this isn't the case, we should close the connection and re-connect.
_gpsd_data = _gpsd_socket.next(timeout=10)
if _gpsd_data == None or _gpsd_data == '':
if _gpsd_data == None or _gpsd_data == "":
logging.error("GPSD - No data received. Attempting to reconnect.")
# Break out of this loop back to the connection loop.
@ -294,29 +383,28 @@ class GPSDAdaptor(object):
# Extract the Time-Position-Velocity report.
# This will have fields as defined in: http://www.catb.org/gpsd/gpsd_json.html
_TPV = _data_stream.TPV
if _TPV['lat'] == 'n/a' or _TPV['lon'] == 'n/a':
if _TPV["lat"] == "n/a" or _TPV["lon"] == "n/a":
# No position data. Continue.
continue
else:
# Produce output data structure.
if _TPV['speed'] != 'n/a':
_speed = _TPV['speed']
if _TPV["speed"] != "n/a":
_speed = _TPV["speed"]
else:
_speed = 0.0
_gps_state = {
'type': 'GPS',
'latitude': _TPV['lat'],
'longitude': _TPV['lon'],
'altitude': _TPV['alt'],
'speed': _speed,
'valid': True
"type": "GPS",
"latitude": _TPV["lat"],
"longitude": _TPV["lon"],
"altitude": _TPV["alt"],
"speed": _speed,
"valid": True,
}
self.send_to_callback(_gps_state)
# Close the GPSD connection.
try:
_gpsd_socket.close()
@ -324,21 +412,19 @@ class GPSDAdaptor(object):
logging.error("GPSD - Error when closing connection: %s" % str(e))
if __name__ == '__main__':
if __name__ == "__main__":
def print_dict(data):
print(data)
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
_gpsd = GPSDAdaptor(callback=print_dict)
time.sleep(30)
_gpsd.close()
# gpsd_socket = GPSDSocket()
# data_stream = DataStream()
# gpsd_socket.connect()
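
The tail of gpsd_process_thread() above reduces a gpsd TPV report to the flat 'GPS' dictionary the rest of the application consumes, treating 'n/a' as a missing field. A minimal restatement of that mapping, with an illustrative helper name:

def tpv_to_gps_state(tpv):
    # tpv: dict form of a gpsd TPV report, with 'n/a' marking absent fields.
    if tpv.get("lat", "n/a") == "n/a" or tpv.get("lon", "n/a") == "n/a":
        return None  # no position fix yet
    speed = tpv["speed"] if tpv.get("speed", "n/a") != "n/a" else 0.0
    return {
        "type": "GPS",
        "latitude": tpv["lat"],
        "longitude": tpv["lon"],
        "altitude": tpv.get("alt", 0.0),
        "speed": speed,
        "valid": True,
    }

print(tpv_to_gps_state({"lat": -34.9, "lon": 138.6, "alt": 50.0, "speed": 12.3}))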

View file

@ -15,6 +15,7 @@ import json
from base64 import b64encode
from hashlib import sha256
from threading import Thread, Lock
try:
# Python 2
from Queue import Queue
@ -33,6 +34,7 @@ uuids = []
def ISOStringNow():
return "%sZ" % datetime.datetime.utcnow().isoformat()
def postListenerData(doc, timeout=10):
global uuids, url_habitat_db
# do we have at least one uuid, if not go get more
@ -41,12 +43,12 @@ def postListenerData(doc, timeout=10):
# Attempt to add UUID and time data to document.
try:
doc['_id'] = uuids.pop()
doc["_id"] = uuids.pop()
except IndexError:
logging.error("Habitat - Unable to post listener data - no UUIDs available.")
return False
doc['time_uploaded'] = ISOStringNow()
doc["time_uploaded"] = ISOStringNow()
try:
_r = requests.post(url_habitat_db, json=doc, timeout=timeout)
@ -64,11 +66,13 @@ def fetchUuids(timeout=10):
while _retries > 0:
try:
_r = requests.get(url_habitat_uuids % 10, timeout=timeout)
uuids.extend(_r.json()['uuids'])
uuids.extend(_r.json()["uuids"])
logging.debug("Habitat - Got UUIDs")
return
except Exception as e:
logging.error("Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e))
logging.error(
"Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e)
)
time.sleep(10)
_retries = _retries - 1
continue
@ -79,18 +83,16 @@ def fetchUuids(timeout=10):
def initListenerCallsign(callsign, antenna=None, radio=None):
doc = {
'type': 'listener_information',
'time_created' : ISOStringNow(),
'data': {
'callsign': callsign
}
}
"type": "listener_information",
"time_created": ISOStringNow(),
"data": {"callsign": callsign},
}
if antenna != None:
doc['data']['antenna'] = antenna
doc["data"]["antenna"] = antenna
if radio != None:
doc['data']['radio'] = radio
doc["data"]["radio"] = radio
resp = postListenerData(doc)
@ -106,16 +108,16 @@ def uploadListenerPosition(callsign, lat, lon, alt, chase=True):
""" Upload Listener Position """
doc = {
'type': 'listener_telemetry',
'time_created': ISOStringNow(),
'data': {
'callsign': callsign,
'chase': chase,
'latitude': lat,
'longitude': lon,
'altitude': alt,
'speed': 0,
}
"type": "listener_telemetry",
"time_created": ISOStringNow(),
"data": {
"callsign": callsign,
"chase": chase,
"latitude": lat,
"longitude": lon,
"altitude": alt,
"speed": 0,
},
}
# post position to habitat
@ -129,12 +131,10 @@ def uploadListenerPosition(callsign, lat, lon, alt, chase=True):
class HabitatChaseUploader(object):
''' Upload supplied chase car positions to Habitat on a regular basis '''
def __init__(self,
update_rate = 30,
callsign = "N0CALL",
upload_enabled = True):
''' Initialise the Habitat Chase uploader, and start the update thread '''
""" Upload supplied chase car positions to Habitat on a regular basis """
def __init__(self, update_rate=30, callsign="N0CALL", upload_enabled=True):
""" Initialise the Habitat Chase uploader, and start the update thread """
self.update_rate = update_rate
self.callsign = callsign
@ -150,19 +150,17 @@ class HabitatChaseUploader(object):
logging.info("Habitat - Chase-Car Position Uploader Started")
def update_position(self, position):
''' Update the chase car position state
""" Update the chase car position state
This function accepts and stores a copy of the same dictionary structure produced by both
Horus UDP broadcasts, and the serial GPS and GPSD modules
'''
"""
with self.car_position_lock:
self.car_position = position.copy()
def upload_thread(self):
''' Uploader thread '''
""" Uploader thread """
while self.uploader_thread_running:
# Grab a copy of the most recent car position.
@ -182,9 +180,16 @@ class HabitatChaseUploader(object):
self.callsign_init = self.callsign
# Upload the listener position.
uploadListenerPosition(self.callsign, _position['latitude'], _position['longitude'], _position['altitude'])
uploadListenerPosition(
self.callsign,
_position["latitude"],
_position["longitude"],
_position["altitude"],
)
except Exception as e:
logging.error("Habitat - Error uploading chase-car position - %s" % str(e))
logging.error(
"Habitat - Error uploading chase-car position - %s" % str(e)
)
# Wait for next update.
_i = 0
@ -193,15 +198,13 @@ class HabitatChaseUploader(object):
_i += 1
def set_update_rate(self, rate):
''' Set the update rate '''
""" Set the update rate """
self.update_rate = int(rate)
def set_callsign(self, call):
''' Set the callsign '''
""" Set the callsign """
self.callsign = call
def close(self):
self.uploader_thread_running = False
try:
@ -209,11 +212,3 @@ class HabitatChaseUploader(object):
except:
pass
logging.info("Habitat - Chase-Car Position Uploader Closed")
Wyświetl plik
@ -1,13 +1,13 @@
#!/usr/bin/env python
#
# Project Horus - Browser-Based Chase Mapper
# Listeners
# Listeners
#
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
# These classes have been pulled in from the horuslib library, to avoid
# requiring horuslib (hopefully soon-to-be retired) as a dependency.
# These classes have been pulled in from the horuslib library, to avoid
# requiring horuslib (hopefully soon-to-be retired) as a dependency.
import socket, json, sys, traceback
from threading import Thread
@ -17,10 +17,10 @@ from datetime import datetime, timedelta
MAX_JSON_LEN = 32768
def fix_datetime(datetime_str, local_dt_str = None):
'''
def fix_datetime(datetime_str, local_dt_str=None):
"""
Given a HH:MM:SS string from a telemetry sentence, produce a complete timestamp, using the current system time as a guide for the date.
'''
"""
if local_dt_str is None:
_now = datetime.utcnow()
@ -32,16 +32,15 @@ def fix_datetime(datetime_str, local_dt_str = None):
_outside_window = False
else:
_outside_window = True
# Append on a timezone indicator if the time doesn't have one.
if datetime_str.endswith('Z') or datetime_str.endswith('+00:00'):
if datetime_str.endswith("Z") or datetime_str.endswith("+00:00"):
pass
else:
datetime_str += "Z"
# Parsing just a HH:MM:SS will return a datetime object with the year, month and day replaced by values in the 'default'
# argument.
# argument.
_telem_dt = parse(datetime_str, default=_now)
if _outside_window:
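A quick sketch of what fix_datetime does with a bare HH:MM:SS string, assuming the import path shown below; the near-midnight adjustment handled by _outside_window is not visible in this hunk.
from chasemapper.listeners import fix_datetime  # import path is an assumption
_dt = fix_datetime("01:23:45")
# The bare time gets a 'Z' appended and today's UTC date filled in,
# e.g. 2021-01-16 01:23:45+00:00.
print(_dt)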
@ -61,16 +60,18 @@ def fix_datetime(datetime_str, local_dt_str = None):
class UDPListener(object):
''' UDP Broadcast Packet Listener
""" UDP Broadcast Packet Listener
Listens for Horuslib UDP broadcast packets, and passes them onto a callback function
'''
"""
def __init__(self,
def __init__(
self,
callback=None,
summary_callback = None,
gps_callback = None,
bearing_callback = None,
port=55672):
summary_callback=None,
gps_callback=None,
bearing_callback=None,
port=55672,
):
self.udp_port = port
self.callback = callback
@ -82,48 +83,45 @@ class UDPListener(object):
self.s = None
self.udp_listener_running = False
def handle_udp_packet(self, packet):
''' Process a received UDP packet '''
""" Process a received UDP packet """
try:
packet_dict = json.loads(packet)
if self.callback is not None:
self.callback(packet_dict)
if packet_dict['type'] == 'PAYLOAD_SUMMARY':
if packet_dict["type"] == "PAYLOAD_SUMMARY":
if self.summary_callback is not None:
self.summary_callback(packet_dict)
if packet_dict['type'] == 'GPS':
if packet_dict["type"] == "GPS":
if self.gps_callback is not None:
self.gps_callback(packet_dict)
if packet_dict['type'] == 'BEARING':
if packet_dict["type"] == "BEARING":
if self.bearing_callback is not None:
self.bearing_callback(packet_dict)
if packet_dict['type'] == 'MODEM_STATS':
if packet_dict["type"] == "MODEM_STATS":
if self.summary_callback is not None:
self.summary_callback(packet_dict)
except Exception as e:
print("Could not parse packet: %s" % str(e))
traceback.print_exc()
def udp_rx_thread(self):
''' Listen for Broadcast UDP packets '''
""" Listen for Broadcast UDP packets """
self.s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.s.settimeout(1)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except:
pass
self.s.bind(('',self.udp_port))
self.s.bind(("", self.udp_port))
print("Started UDP Listener Thread.")
self.udp_listener_running = True
@ -134,20 +132,18 @@ class UDPListener(object):
m = None
except:
traceback.print_exc()
if m != None:
self.handle_udp_packet(m[0])
print("Closing UDP Listener")
self.s.close()
def start(self):
if self.listener_thread is None:
self.listener_thread = Thread(target=self.udp_rx_thread)
self.listener_thread.start()
def close(self):
self.udp_listener_running = False
self.listener_thread.join()
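To exercise UDPListener by hand, a broadcast JSON packet with a recognised type field is enough. The sketch below sends a PAYLOAD_SUMMARY packet to the default port 55672; every field other than type is an illustrative assumption.
import json
import socket
# Only the 'type' key matters for routing in handle_udp_packet; the rest is illustrative.
packet = {
    "type": "PAYLOAD_SUMMARY",
    "callsign": "HORUS",
    "latitude": -34.9,
    "longitude": 138.6,
    "altitude": 10000,
}
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.sendto(json.dumps(packet).encode("ascii"), ("<broadcast>", 55672))
s.close()
# Receiving side (sketch):
# _listener = UDPListener(summary_callback=print, port=55672)
# _listener.start()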
@ -162,13 +158,11 @@ class OziListener(object):
WAYPOINT,waypoint_name,latitude,longitude,comment\n
"""
allowed_sentences = ['TELEMETRY', 'WAYPOINT']
allowed_sentences = ["TELEMETRY", "WAYPOINT"]
def __init__(self,
hostname = '',
port = 8942,
telemetry_callback = None,
waypoint_callback = None):
def __init__(
self, hostname="", port=8942, telemetry_callback=None, waypoint_callback=None
):
self.input_host = hostname
self.input_port = port
@ -177,21 +171,19 @@ class OziListener(object):
self.start()
def start(self):
''' Start the UDP Listener Thread. '''
""" Start the UDP Listener Thread. """
self.udp_listener_running = True
self.t = Thread(target=self.udp_rx_thread)
self.t.start()
def udp_rx_thread(self):
"""
Listen for incoming UDP packets, and pass them off to another function to be processed.
"""
self.s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.s.settimeout(1)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
@ -199,7 +191,7 @@ class OziListener(object):
except:
pass
self.s.bind((self.input_host, self.input_port))
while self.udp_listener_running:
try:
m = self.s.recvfrom(1024)
@ -207,7 +199,7 @@ class OziListener(object):
m = None
except:
traceback.print_exc()
if m != None:
try:
self.handle_packet(m[0])
@ -215,11 +207,10 @@ class OziListener(object):
traceback.print_exc()
print("ERROR: Couldn't handle packet correctly.")
pass
print("INFO: Closing UDP Listener Thread")
self.s.close()
def close(self):
"""
Close the UDP listener thread.
@ -230,11 +221,10 @@ class OziListener(object):
except:
pass
def handle_telemetry_packet(self, packet):
''' Split a telemetry packet into time/lat/lon/alt, and pass it onto a callback '''
""" Split a telemetry packet into time/lat/lon/alt, and pass it onto a callback """
_fields = packet.split(',')
_fields = packet.split(",")
_short_time = _fields[1]
_lat = float(_fields[2])
_lon = float(_fields[3])
@ -249,20 +239,19 @@ class OziListener(object):
_time_dt = fix_datetime(_short_time)
_output = {
'time' : _time_dt,
'lat' : _lat,
'lon' : _lon,
'alt' : _alt,
'comment' : 'Telemetry Data'
"time": _time_dt,
"lat": _lat,
"lon": _lon,
"alt": _alt,
"comment": "Telemetry Data",
}
self.telemetry_callback(_output)
def handle_waypoint_packet(self, packet):
''' Split a 'Waypoint' packet into fields, and pass onto a callback '''
""" Split a 'Waypoint' packet into fields, and pass onto a callback """
_fields = packet.split(',')
_fields = packet.split(",")
_waypoint_name = _fields[1]
_lat = float(_fields[2])
_lon = float(_fields[3])
@ -271,23 +260,22 @@ class OziListener(object):
_time_dt = datetime.utcnow()
_output = {
'time' : _time_dt,
'name' : _waypoint_name,
'lat' : _lat,
'lon' : _lon,
'comment' : _comment
"time": _time_dt,
"name": _waypoint_name,
"lat": _lat,
"lon": _lon,
"comment": _comment,
}
self.waypoint_callback(_output)
def handle_packet(self, packet):
"""
Check an incoming packet matches a valid type, and then forward it on.
"""
# Extract header (first field)
packet_type = packet.split(',')[0]
packet_type = packet.split(",")[0]
if packet_type not in self.allowed_sentences:
print("ERROR: Got unknown packet: %s" % packet)
@ -305,4 +293,3 @@ class OziListener(object):
except:
print("ERROR: Error when handling packet.")
traceback.print_exc()
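The OziPlotter-style sentences handled above are plain comma-separated text over UDP. The sketch below pushes a TELEMETRY sentence at an OziListener on the default port 8942; the altitude field position (index 4) is an assumption, since that line is not visible in this hunk.
import socket
# TELEMETRY,HH:MM:SS,latitude,longitude,altitude  (altitude index assumed)
sentence = "TELEMETRY,01:23:45,-34.9285,138.6007,10000\n"
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.sendto(sentence.encode("ascii"), ("127.0.0.1", 8942))
s.close()
# handle_telemetry_packet would pass roughly the following to telemetry_callback:
# {"time": <datetime>, "lat": -34.9285, "lon": 138.6007, "alt": 10000.0,
#  "comment": "Telemetry Data"}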
Wyświetl plik
@ -12,6 +12,7 @@ import os
import pytz
import time
from threading import Thread, Lock
try:
# Python 2
from Queue import Queue
@ -32,8 +33,9 @@ class ChaseLogger(object):
self.filename = filename
else:
# Otherwise, create a filename based on the current time.
self.filename = os.path.join(log_dir, datetime.datetime.utcnow().strftime("%Y%m%d-%H%MZ.log"))
self.filename = os.path.join(
log_dir, datetime.datetime.utcnow().strftime("%Y%m%d-%H%MZ.log")
)
self.file_lock = Lock()
@ -42,7 +44,7 @@ class ChaseLogger(object):
# Open the file.
try:
self.f = open(self.filename, 'a')
self.f = open(self.filename, "a")
logging.info("Logging - Opened log file %s." % self.filename)
except Exception as e:
self.log_error("Logging - Could not open log file - %s" % str(e))
@ -53,7 +55,6 @@ class ChaseLogger(object):
self.log_process_thread = Thread(target=self.process_queue)
self.log_process_thread.start()
def add_car_position(self, data):
""" Log a chase car position update.
Input dict expected to be in the format:
@ -67,11 +68,11 @@ class ChaseLogger(object):
"""
data['log_type'] = 'CAR POSITION'
data['log_time'] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
data["log_type"] = "CAR POSITION"
data["log_time"] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
# Convert the input datetime object into a string.
data['time'] = data['time'].isoformat()
data["time"] = data["time"].isoformat()
# Add it to the queue if we are running.
if self.input_processing_running:
@ -83,13 +84,13 @@ class ChaseLogger(object):
""" Log balloon telemetry.
"""
data['log_type'] = 'BALLOON TELEMETRY'
data['log_time'] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
data["log_type"] = "BALLOON TELEMETRY"
data["log_time"] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
# Convert the input datetime object into a string.
data['time'] = data['time_dt'].isoformat()
data["time"] = data["time_dt"].isoformat()
# Remove the time_dt element (this cannot be serialised to JSON).
data.pop('time_dt')
data.pop("time_dt")
# Add it to the queue if we are running.
if self.input_processing_running:
@ -97,26 +98,11 @@ class ChaseLogger(object):
else:
self.log_error("Processing not running, discarding.")
def add_balloon_prediction(self, data):
""" Log a prediction run """
data['log_type'] = 'PREDICTION'
data['log_time'] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
# Add it to the queue if we are running.
if self.input_processing_running:
self.input_queue.put(data)
else:
self.log_error("Processing not running, discarding.")
def add_bearing(self, data):
""" Log a packet of bearing data """
data['log_type'] = 'BEARING'
data['log_time'] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
data["log_type"] = "PREDICTION"
data["log_time"] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
# Add it to the queue if we are running.
if self.input_processing_running:
@ -124,6 +110,17 @@ class ChaseLogger(object):
else:
self.log_error("Processing not running, discarding.")
def add_bearing(self, data):
""" Log a packet of bearing data """
data["log_type"] = "BEARING"
data["log_time"] = pytz.utc.localize(datetime.datetime.utcnow()).isoformat()
# Add it to the queue if we are running.
if self.input_processing_running:
self.input_queue.put(data)
else:
self.log_error("Processing not running, discarding.")
def process_queue(self):
""" Process data from the input queue, and write telemetry to log files.
@ -146,7 +143,6 @@ class ChaseLogger(object):
# Sleep while waiting for some new data.
time.sleep(5)
def running(self):
""" Check if the logging thread is running.
@ -155,7 +151,6 @@ class ChaseLogger(object):
"""
return self.input_processing_running
def close(self):
try:
self.input_processing_running = False
@ -165,7 +160,6 @@ class ChaseLogger(object):
self.log_info("Stopped Telemetry Logger Thread.")
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -173,7 +167,6 @@ class ChaseLogger(object):
"""
logging.debug("Chase Logger - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -181,11 +174,9 @@ class ChaseLogger(object):
"""
logging.info("Chase Logger - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.error("Chase Logger - %s" % line)
Wyświetl plik
@ -11,45 +11,48 @@ import logging
import os
import pytz
import time
#from datetime import datetime
from dateutil.parser import parse
# from datetime import datetime
from dateutil.parser import parse
def read_file(filename):
""" Read log file, and output an array of dicts. """
_output = []
_f = open(filename, 'r')
_f = open(filename, "r")
for _line in _f:
try:
_data = json.loads(_line)
_output.append(_data)
except Exception as e:
logging.debug("Error reading line: %s" % str(e))
if len(_output) != 0 :
if len(_output) != 0:
logging.info("Read %d log entries from %s" % (len(_output), filename))
return _output
def read_last_balloon_telemetry():
""" Read last balloon telemetry. Need to work back from last file to find balloon telemetry and read the last entry - don't return until whole file scanned
""" Read last balloon telemetry. Need to work back from last file to find balloon telemetry and read the last entry - don't return until whole file scanned
"""
_lasttelemetry = []
dirs = sorted(os.listdir("./log_files"),reverse = True) # Generate a reverse sorted list - will have to look through to find last log_file with telemetry
for file in dirs:
if file.endswith(".log"):
telemetry_found = False
try:
log = read_file("./log_files/" + file)
except Exception as e:
logging.debug("Error reading file - maybe in use")
for _entry in log:
if _entry['log_type'] == "BALLOON TELEMETRY":
telemetry_found = True
_last_telemetry = _entry
if telemetry_found == True:
_last_telemetry['time_dt'] = parse(_last_telemetry.pop('time'))
return _last_telemetry
_lasttelemetry = []
dirs = sorted(
os.listdir("./log_files"), reverse=True
) # Generate a reverse sorted list - will have to look through to find last log_file with telemetry
for file in dirs:
if file.endswith(".log"):
telemetry_found = False
try:
log = read_file("./log_files/" + file)
except Exception as e:
logging.debug("Error reading file - maybe in use: %s" % str(e))
for _entry in log:
if _entry["log_type"] == "BALLOON TELEMETRY":
telemetry_found = True
_last_telemetry = _entry
if telemetry_found == True:
_last_telemetry["time_dt"] = parse(_last_telemetry.pop("time"))
return _last_telemetry
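A sketch of using the log reader directly; the module path and log filename are assumptions (the filename follows the %Y%m%d-%H%MZ.log pattern used by ChaseLogger).
from chasemapper.logread import read_file  # module path is an assumption
# Filename is illustrative.
entries = read_file("./log_files/20210116-0448Z.log")
telemetry = [e for e in entries if e.get("log_type") == "BALLOON TELEMETRY"]
print("Log contains %d balloon telemetry entries." % len(telemetry))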
Wyświetl plik
@ -11,6 +11,7 @@ from threading import Thread
model_download_running = False
def predictor_download_model(command, callback):
""" Run the supplied command, which should download a GFS model and place it into the GFS directory
@ -51,14 +52,15 @@ def predictor_spawn_download(command, callback=None):
if model_download_running:
return "Already Downloading."
_download_thread = Thread(target=predictor_download_model, kwargs={'command':command, 'callback': callback})
_download_thread = Thread(
target=predictor_download_model,
kwargs={"command": command, "callback": callback},
)
_download_thread.start()
return "Started downloader."
if __name__ == "__main__":
import sys
from .config import parse_config_file
@ -68,11 +70,10 @@ if __name__ == "__main__":
_cfg = parse_config_file(_cfg_file)
if _cfg['pred_model_download'] == "none":
if _cfg["pred_model_download"] == "none":
print("Model download not enabled.")
sys.exit(1)
predictor_download_model(_cfg['pred_model_download'])
print(available_gfs(_cfg['pred_gfs_directory']))
predictor_download_model(_cfg["pred_model_download"])
print(available_gfs(_cfg["pred_gfs_directory"]))
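A sketch of spawning the model download in the background rather than running it inline as the __main__ block above does; the absolute import paths are assumptions, and the callback here simply prints whatever the downloader passes back.
import sys
import time
from chasemapper.config import parse_config_file            # import path is an assumption
from chasemapper.predictor import predictor_spawn_download  # import path is an assumption
_cfg = parse_config_file(sys.argv[1])
if _cfg["pred_model_download"] != "none":
    # Returns "Started downloader." or "Already Downloading."
    print(predictor_spawn_download(_cfg["pred_model_download"], callback=print))
    time.sleep(5)  # the download itself runs in its own thread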
Wyświetl plik
@ -18,6 +18,7 @@ from threading import Thread
TAWHIRI_API_URL = "http://predict.cusf.co.uk/api/v1/"
def get_tawhiri_prediction(
launch_datetime,
launch_latitude,
@ -26,9 +27,9 @@ def get_tawhiri_prediction(
ascent_rate=5.0,
burst_altitude=30000.0,
descent_rate=5.0,
profile='standard_profile',
profile="standard_profile",
dataset=None,
timeout = 10
timeout=10,
):
""" Request a Prediction from the Tawhiri Predictor API """
@ -39,7 +40,6 @@ def get_tawhiri_prediction(
# Create RFC3339-compliant timestamp
_dt_rfc3339 = launch_datetime.isoformat()
_params = {
"launch_latitude": launch_latitude,
"launch_longitude": launch_longitude,
@ -48,9 +48,9 @@ def get_tawhiri_prediction(
"ascent_rate": ascent_rate,
"descent_rate": descent_rate,
"burst_altitude": burst_altitude,
"profile": profile
"profile": profile,
}
if dataset:
_params["dataset"] = dataset
@ -61,9 +61,9 @@ def get_tawhiri_prediction(
_json = _r.json()
if 'error' in _json:
if "error" in _json:
# The Tawhiri API has returned an error
_error = "%s: %s" % (_json['error']['type'], _json['error']['description'])
_error = "%s: %s" % (_json["error"]["type"], _json["error"]["description"])
logging.error("Tawhiri - %s" % _error)
@ -81,30 +81,31 @@ def get_tawhiri_prediction(
def parse_tawhiri_data(data):
""" Parse a returned flight trajectory from Tawhiri, and convert it to a cusf_predictor_wrapper compatible format """
_epoch = pytz.utc.localize(datetime.datetime(1970,1,1))
_epoch = pytz.utc.localize(datetime.datetime(1970, 1, 1))
# Extract dataset information
_dataset = parse(data['request']['dataset'])
_dataset = parse(data["request"]["dataset"])
_dataset = _dataset.strftime("%Y%m%d%Hz")
_path = []
for _stage in data['prediction']:
_trajectory = _stage['trajectory']
for _stage in data["prediction"]:
_trajectory = _stage["trajectory"]
for _point in _trajectory:
# Create UTC timestamp without using datetime.timestamp(), for Python 2.7 backwards compatibility.
_dt = parse(_point['datetime'])
_dt = parse(_point["datetime"])
_dt_timestamp = (_dt - _epoch).total_seconds()
_path.append([_dt_timestamp, _point['latitude'], _point['longitude'], _point['altitude']])
_path.append(
[
_dt_timestamp,
_point["latitude"],
_point["longitude"],
_point["altitude"],
]
)
_output = {
"dataset": _dataset,
"path": _path
}
_output = {"dataset": _dataset, "path": _path}
return _output
@ -113,11 +114,12 @@ if __name__ == "__main__":
import datetime
import pprint
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.INFO)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.INFO
)
_now = datetime.datetime.utcnow()
# Regular complete-flightpath prediction
_data = get_tawhiri_prediction(
launch_datetime=_now,
@ -134,6 +136,6 @@ if __name__ == "__main__":
launch_longitude=138.5194,
launch_altitude=10000,
burst_altitude=10001,
descent_rate=abs(-6.0)
descent_rate=abs(-6.0),
)
pprint.pprint(_data)
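Continuing the __main__ example above, and assuming get_tawhiri_prediction hands back the {"dataset", "path"} structure built by parse_tawhiri_data (the hand-off and the failure return value are not visible in this hunk), the predicted landing point is simply the last entry in the path.
# Assumes _data has the {"dataset": ..., "path": [[time, lat, lon, alt], ...]}
# shape produced by parse_tawhiri_data, and that a failed request returns None.
if _data is not None:
    _landing = _data["path"][-1]
    print("Dataset used: %s" % _data["dataset"])
    print("Predicted landing: %.5f, %.5f" % (_landing[1], _landing[2]))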
The diff for one further file is too large to display here.