Add Live KML, fix .py formatting using black

pull/332/head
Mark Jessop 2020-12-18 17:17:54 +10:30
parent 1e6e9b00a1
commit 75638eb46a
21 changed files with 3502 additions and 2228 deletions

The diff for this file is too large to display.

View file

@@ -29,7 +29,7 @@ __version__ = "1.3.3-beta7"
# 'task' (class) : If this SDR is in use, a reference to the task.
# 'bias' (bool) : True if the bias-tee should be enabled on this SDR, False otherwise.
# 'ppm' (int) : The PPM offset for this SDR.
# 'gain' (float) : The gain setting to use with this SDR. A setting of -1 turns on hardware AGC.
# }
#
#
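For reference, a minimal sketch of one sdr_list entry described by the comments above, keyed by device index and matching the structure later built in read_auto_rx_config (values illustrative):

# Illustrative only - one entry per RTLSDR device index.
sdr_list = {
    "0": {
        "in_use": False,  # True while a task is using this SDR.
        "task": None,     # Reference to that task, if any.
        "bias": False,    # Enable the bias-tee on this SDR.
        "ppm": 0,         # PPM frequency offset.
        "gain": -1.0,     # Gain setting; -1 turns on hardware AGC.
    }
}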
@@ -45,7 +45,7 @@ sdr_list = {}
task_list = {}
# Scan result queue.
scan_results = Queue()
# Global scan inhibit flag, used by web interface.
scan_inhibit = False

View file

@@ -13,6 +13,7 @@ import traceback
import socket
from threading import Thread, Lock
from . import __version__ as auto_rx_version
try:
# Python 2
from Queue import Queue
@@ -21,8 +22,9 @@ except ImportError:
from queue import Queue
def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM Balloon", position_report=False):
def telemetry_to_aprs_position(
sonde_data, object_name="<id>", aprs_comment="BOM Balloon", position_report=False
):
""" Convert a dictionary containing Sonde telemetry into an APRS packet.
Args:
@@ -33,13 +35,13 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
"""
# Generate the APRS 'callsign' for the sonde.
if object_name == "<id>":
# Use the radiosonde ID as the object ID
if ('RS92' in sonde_data['type']) or ('RS41' in sonde_data['type']):
if ("RS92" in sonde_data["type"]) or ("RS41" in sonde_data["type"]):
# We can use the Vaisala sonde ID directly.
_object_name = sonde_data["id"].strip()
elif 'DFM' in sonde_data['type']:
elif "DFM" in sonde_data["type"]:
# As per agreement with other radiosonde decoding software developers, we will now
# use the DFM serial number verbatim in the APRS ID, prefixed with 'D'.
# For recent DFM sondes, this will result in a object ID of: Dyynnnnnn
@@ -49,7 +51,7 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
# Split out just the serial number part of the ID, and cast it to an int
# This acts as another check that we have been provided with a numeric serial.
_dfm_id = int(sonde_data['id'].split('-')[-1])
_dfm_id = int(sonde_data["id"].split("-")[-1])
# Create the object name
_object_name = "D%d" % _dfm_id
@@ -57,24 +59,24 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
# Convert to upper-case hex, and take the last 5 nibbles.
_id_suffix = hex(_dfm_id).upper()[-5:]
elif 'M10' in sonde_data['type']:
elif "M10" in sonde_data["type"]:
# Use the generated id same as dxlAPRS
_object_name = sonde_data['aprsid']
_object_name = sonde_data["aprsid"]
elif 'IMET' in sonde_data['type']:
elif "IMET" in sonde_data["type"]:
# Use the last 5 characters of the unique ID we have generated.
_object_name = "IMET" + sonde_data['id'][-5:]
_object_name = "IMET" + sonde_data["id"][-5:]
elif 'LMS' in sonde_data['type']:
elif "LMS" in sonde_data["type"]:
# Use the last 5 hex digits of the sonde ID.
_id_suffix = int(sonde_data['id'].split('-')[1])
_id_suffix = int(sonde_data["id"].split("-")[1])
_id_hex = hex(_id_suffix).upper()
_object_name = "LMS6" + _id_hex[-5:]
elif 'MEISEI' in sonde_data['type']:
elif "MEISEI" in sonde_data["type"]:
# Convert the serial number to an int
_meisei_id = int(sonde_data['id'].split('-')[-1])
_id_suffix = hex(_meisei_id).upper().split('0X')[1]
_meisei_id = int(sonde_data["id"].split("-")[-1])
_id_suffix = hex(_meisei_id).upper().split("0X")[1]
# Clip to 6 hex digits, in case we end up with more for some reason.
if len(_id_suffix) > 6:
_id_suffix = _id_suffix[-6:]
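To make the ID handling above concrete, a short worked example of the DFM and LMS6 branches (serial numbers are illustrative only):

# DFM: numeric serial used verbatim, prefixed with 'D'.
_dfm_id = int("DFM-19123456".split("-")[-1])  # 19123456
_object_name = "D%d" % _dfm_id                # 'D19123456'
# Fallback for over-long serials: last 5 hex nibbles.
_id_suffix = hex(_dfm_id).upper()[-5:]        # '3CD00'

# LMS6: last 5 hex digits of the numeric serial.
_id_hex = hex(int("LMS6-1234567".split("-")[1])).upper()  # '0X12D687'
_object_name = "LMS6" + _id_hex[-5:]          # 'LMS62D687'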
@@ -83,7 +85,10 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
# New Sonde types will be added in here.
else:
# Unknown sonde type, don't know how to handle this yet.
logging.error('No APRS ID conversion available for sonde type: %s' % sonde_data['type'])
logging.error(
"No APRS ID conversion available for sonde type: %s"
% sonde_data["type"]
)
return (None, None)
else:
_object_name = object_name
@@ -92,30 +97,34 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
if len(_object_name) > 9:
_object_name = _object_name[:9]
elif len(_object_name) < 9:
_object_name = _object_name + " "*(9-len(_object_name))
_object_name = _object_name + " " * (9 - len(_object_name))
# Use the actual sonde frequency, if we have it.
if 'f_centre' in sonde_data:
if "f_centre" in sonde_data:
# We have an estimate of the sonde's centre frequency from the modem, use this in place of
# the RX frequency.
# Round to 1 kHz
_freq = round(sonde_data['f_centre']/1000.0)
_freq = round(sonde_data["f_centre"] / 1000.0)
# Convert to MHz.
_freq = "%.3f MHz" % (_freq/1e3)
_freq = "%.3f MHz" % (_freq / 1e3)
else:
# Otherwise, use the normal frequency.
_freq = sonde_data['freq']
_freq = sonde_data["freq"]
# Generate the comment field.
_aprs_comment = aprs_comment
_aprs_comment = _aprs_comment.replace("<freq>", _freq)
_aprs_comment = _aprs_comment.replace("<id>", sonde_data['id'])
_aprs_comment = _aprs_comment.replace("<temp>", "%.1fC" % sonde_data['temp'])
_aprs_comment = _aprs_comment.replace("<pressure>", "%.1fhPa" % sonde_data['pressure'])
_aprs_comment = _aprs_comment.replace("<humidity>", "%.1f" % sonde_data['humidity'] + "%")
_aprs_comment = _aprs_comment.replace("<batt>", "%.1fV" % sonde_data['batt'])
_aprs_comment = _aprs_comment.replace("<vel_v>", "%.1fm/s" % sonde_data['vel_v'])
_aprs_comment = _aprs_comment.replace("<type>", sonde_data['type'])
_aprs_comment = _aprs_comment.replace("<id>", sonde_data["id"])
_aprs_comment = _aprs_comment.replace("<temp>", "%.1fC" % sonde_data["temp"])
_aprs_comment = _aprs_comment.replace(
"<pressure>", "%.1fhPa" % sonde_data["pressure"]
)
_aprs_comment = _aprs_comment.replace(
"<humidity>", "%.1f" % sonde_data["humidity"] + "%"
)
_aprs_comment = _aprs_comment.replace("<batt>", "%.1fV" % sonde_data["batt"])
_aprs_comment = _aprs_comment.replace("<vel_v>", "%.1fm/s" % sonde_data["vel_v"])
_aprs_comment = _aprs_comment.replace("<type>", sonde_data["type"])
# TODO: RS41 Burst Timer
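As a usage note, the placeholder substitution above expands a comment template such as the default one into the final APRS comment; with illustrative telemetry values:

_comment = "Clb=<vel_v> t=<temp> <freq> Type=<type> Radiosonde"
_comment = _comment.replace("<vel_v>", "%.1fm/s" % -5.0)
_comment = _comment.replace("<temp>", "%.1fC" % 1.0)
_comment = _comment.replace("<freq>", "401.520 MHz")
_comment = _comment.replace("<type>", "RS41")
# -> 'Clb=-5.0m/s t=1.0C 401.520 MHz Type=RS41 Radiosonde'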
@@ -128,25 +137,25 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
lat_minute = abs(lat - int(lat)) * 60.0
lat_min_str = ("%02.4f" % lat_minute).zfill(7)[:5]
lat_dir = "S"
if lat>0.0:
if lat > 0.0:
lat_dir = "N"
lat_str = "%02d%s" % (lat_degree,lat_min_str) + lat_dir
lat_str = "%02d%s" % (lat_degree, lat_min_str) + lat_dir
# Convert float longitude to APRS format (DDDMM.MM)
lon = float(sonde_data["lon"])
lon_degree = abs(int(lon))
lon_minute = abs(lon - int(lon)) * 60.0
lon_min_str = ("%02.4f" % lon_minute).zfill(7)[:5]
lon_dir = "E"
if lon<0.0:
if lon < 0.0:
lon_dir = "W"
lon_str = "%03d%s" % (lon_degree,lon_min_str) + lon_dir
lon_str = "%03d%s" % (lon_degree, lon_min_str) + lon_dir
# Generate the added digits of precision, as per http://www.aprs.org/datum.txt
# Base-91 can only encode decimal integers between 0 and 93 (otherwise we end up with non-printable characters)
# So, we have to scale the range 00-99 down to 0-90, being careful to avoid errors due to floating point math.
_lat_prec = int(round(float(("%02.4f" % lat_minute)[-2:])/1.10))
_lon_prec = int(round(float(("%02.4f" % lon_minute)[-2:])/1.10))
_lat_prec = int(round(float(("%02.4f" % lat_minute)[-2:]) / 1.10))
_lon_prec = int(round(float(("%02.4f" % lon_minute)[-2:]) / 1.10))
# Now we can add 33 to the 0-90 value to produce the Base-91 character.
_lat_prec = chr(_lat_prec + 33)
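Worked through for a sample latitude, the DDMM.MM conversion and the base-91 extra precision digit from http://www.aprs.org/datum.txt behave as follows (value is illustrative):

lat = -34.123456
lat_minute = abs(lat - int(lat)) * 60.0             # 7.40736
lat_min_str = ("%02.4f" % lat_minute).zfill(7)[:5]  # '07.40'
lat_str = "%02d%s" % (abs(int(lat)), lat_min_str) + "S"  # '3407.40S'
# Last two decimals '74' scaled into 0-90, then offset into Base-91:
_lat_prec = chr(int(round(float(("%02.4f" % lat_minute)[-2:]) / 1.10)) + 33)  # 'd'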
@@ -158,41 +167,65 @@ def telemetry_to_aprs_position(sonde_data, object_name="<id>", aprs_comment="BOM
_datum = "!w%s%s!" % (_lat_prec, _lon_prec)
# Convert Alt (in metres) to feet
alt = int(float(sonde_data["alt"])/0.3048)
alt = int(float(sonde_data["alt"]) / 0.3048)
# Produce the timestamp
_aprs_timestamp = sonde_data['datetime_dt'].strftime("%H%M%S")
_aprs_timestamp = sonde_data["datetime_dt"].strftime("%H%M%S")
# Generate course/speed data, if provided in the telemetry dictionary
if ('heading' in sonde_data.keys()) and ('vel_h' in sonde_data.keys()):
course_speed = "%03d/%03d" % (int(sonde_data['heading']), int(sonde_data['vel_h']*1.944))
if ("heading" in sonde_data.keys()) and ("vel_h" in sonde_data.keys()):
course_speed = "%03d/%03d" % (
int(sonde_data["heading"]),
int(sonde_data["vel_h"] * 1.944),
)
else:
course_speed = "000/000"
if position_report:
# Produce an APRS position report string
# Note, we are using the 'position with timestamp' data type, as per http://www.aprs.org/doc/APRS101.PDF
out_str = "/%sh%s/%sO%s/A=%06d %s %s" % (_aprs_timestamp,lat_str,lon_str,course_speed,alt,_aprs_comment,_datum)
out_str = "/%sh%s/%sO%s/A=%06d %s %s" % (
_aprs_timestamp,
lat_str,
lon_str,
course_speed,
alt,
_aprs_comment,
_datum,
)
else:
# Produce an APRS Object
out_str = ";%s*%sh%s/%sO%s/A=%06d %s %s" % (_object_name,_aprs_timestamp,lat_str,lon_str,course_speed,alt,_aprs_comment,_datum)
out_str = ";%s*%sh%s/%sO%s/A=%06d %s %s" % (
_object_name,
_aprs_timestamp,
lat_str,
lon_str,
course_speed,
alt,
_aprs_comment,
_datum,
)
# Return both the packet, and the 'callsign'.
return (out_str, _object_name.strip())
def generate_station_object(callsign, lat, lon, comment="radiosonde_auto_rx SondeGate v<version>", icon='/r', position_report=False):
''' Generate a station object '''
def generate_station_object(
callsign,
lat,
lon,
comment="radiosonde_auto_rx SondeGate v<version>",
icon="/r",
position_report=False,
):
""" Generate a station object """
# Pad or limit the station callsign to 9 characters, if it is too long or too short.
if len(callsign) > 9:
callsign = callsign[:9]
elif len(callsign) < 9:
callsign = callsign + " "*(9-len(callsign))
callsign = callsign + " " * (9 - len(callsign))
# Convert float latitude to APRS format (DDMM.MM)
lat = float(lat)
@@ -200,25 +233,25 @@ generate_station_object(callsign, lat, lon, comment="radiosonde_auto_rx Sond
lat_minute = abs(lat - int(lat)) * 60.0
lat_min_str = ("%02.4f" % lat_minute).zfill(7)[:5]
lat_dir = "S"
if lat>0.0:
if lat > 0.0:
lat_dir = "N"
lat_str = "%02d%s" % (lat_degree,lat_min_str) + lat_dir
lat_str = "%02d%s" % (lat_degree, lat_min_str) + lat_dir
# Convert float longitude to APRS format (DDDMM.MM)
lon = float(lon)
lon_degree = abs(int(lon))
lon_minute = abs(lon - int(lon)) * 60.0
lon_min_str = ("%02.4f" % lon_minute).zfill(7)[:5]
lon_dir = "E"
if lon<0.0:
if lon < 0.0:
lon_dir = "W"
lon_str = "%03d%s" % (lon_degree,lon_min_str) + lon_dir
lon_str = "%03d%s" % (lon_degree, lon_min_str) + lon_dir
# Generate the added digits of precision, as per http://www.aprs.org/datum.txt
# Base-91 can only encode decimal integers between 0 and 93 (otherwise we end up with non-printable characters)
# So, we have to scale the range 00-99 down to 0-90, being careful to avoid errors due to floating point math.
_lat_prec = int(round(float(("%02.4f" % lat_minute)[-2:])/1.10))
_lon_prec = int(round(float(("%02.4f" % lon_minute)[-2:])/1.10))
_lat_prec = int(round(float(("%02.4f" % lat_minute)[-2:]) / 1.10))
_lon_prec = int(round(float(("%02.4f" % lon_minute)[-2:]) / 1.10))
# Now we can add 33 to the 0-90 value to produce the Base-91 character.
_lat_prec = chr(_lat_prec + 33)
@@ -231,19 +264,34 @@ generate_station_object(callsign, lat, lon, comment="radiosonde_auto_rx Sond
# Generate timestamp using current UTC time
_aprs_timestamp = datetime.datetime.utcnow().strftime("%H%M%S")
# Add version string to position comment, if requested.
_aprs_comment = comment
_aprs_comment = _aprs_comment.replace('<version>', auto_rx_version)
_aprs_comment = _aprs_comment.replace("<version>", auto_rx_version)
# Generate output string
if position_report:
# Produce a position report with no timestamp, as per page 32 of http://www.aprs.org/doc/APRS101.PDF
out_str = "!%s%s%s%s%s %s" % (lat_str, icon[0], lon_str, icon[1], _aprs_comment, _datum)
out_str = "!%s%s%s%s%s %s" % (
lat_str,
icon[0],
lon_str,
icon[1],
_aprs_comment,
_datum,
)
else:
# Produce an object string
out_str = ";%s*%sh%s%s%s%s%s %s" % (callsign, _aprs_timestamp, lat_str, icon[0], lon_str, icon[1], _aprs_comment, _datum)
out_str = ";%s*%sh%s%s%s%s%s %s" % (
callsign,
_aprs_timestamp,
lat_str,
icon[0],
lon_str,
icon[1],
_aprs_comment,
_datum,
)
return out_str
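As a usage sketch, beaconing a placeholder station position through the function above produces an APRS position report:

# Callsign and coordinates are placeholders.
packet = generate_station_object(
    "N0CALL", -34.0, 138.0, comment="SondeGate v<version>", icon="/r", position_report=True
)
# packet begins '!3400.00S/13800.00Er', followed by the comment
# (with <version> expanded) and the trailing '!w..!' datum suffix.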
@@ -252,8 +300,9 @@ generate_station_object(callsign, lat, lon, comment="radiosonde_auto_rx Sond
# APRS Uploader Class
#
class APRSUploader(object):
'''
"""
Queued APRS Telemetry Uploader class
This performs uploads to an APRS-IS server.
@@ -266,32 +315,44 @@ class APRSUploader(object):
it is immediately emptied, to avoid upload of out-of-date packets.
Note that this uploader object is intended to handle telemetry from multiple sondes
'''
"""
# We require the following fields to be present in the incoming telemetry dictionary data
REQUIRED_FIELDS = ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'type', 'freq', 'freq_float', 'datetime_dt']
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"type",
"freq",
"freq_float",
"datetime_dt",
]
def __init__(self,
aprs_callsign = 'N0CALL',
aprs_passcode = "00000",
object_name_override = None,
object_comment = "RadioSonde",
position_report = False,
aprsis_host = 'rotate.aprs2.net',
aprsis_port = 14580,
aprsis_reconnect = 300,
station_beacon = False,
station_beacon_rate = 30,
station_beacon_position = (0.0,0.0,0.0),
station_beacon_comment = "radiosonde_auto_rx SondeGate v<version>",
station_beacon_icon = "/r",
synchronous_upload_time = 30,
callsign_validity_threshold = 5,
upload_queue_size = 16,
upload_timeout = 5,
inhibit = False
):
def __init__(
self,
aprs_callsign="N0CALL",
aprs_passcode="00000",
object_name_override=None,
object_comment="RadioSonde",
position_report=False,
aprsis_host="rotate.aprs2.net",
aprsis_port=14580,
aprsis_reconnect=300,
station_beacon=False,
station_beacon_rate=30,
station_beacon_position=(0.0, 0.0, 0.0),
station_beacon_comment="radiosonde_auto_rx SondeGate v<version>",
station_beacon_icon="/r",
synchronous_upload_time=30,
callsign_validity_threshold=5,
upload_queue_size=16,
upload_timeout=5,
inhibit=False,
):
""" Initialise an APRS Uploader object.
Args:
@@ -345,18 +406,20 @@ class APRSUploader(object):
self.inhibit = inhibit
self.station_beacon = {
'enabled': station_beacon,
'position': station_beacon_position,
'rate': station_beacon_rate,
'comment': station_beacon_comment,
'icon': station_beacon_icon
"enabled": station_beacon,
"position": station_beacon_position,
"rate": station_beacon_rate,
"comment": station_beacon_comment,
"icon": station_beacon_icon,
}
if object_name_override is None:
self.object_name_override = "<id>"
else:
self.object_name_override = object_name_override
self.log_info("Using APRS Object Name Override: %s" % self.object_name_override)
self.log_info(
"Using APRS Object Name Override: %s" % self.object_name_override
)
# Our two Queues - one to hold sentences to be upload, the other to temporarily hold
# input telemetry dictionaries before they are converted and processed.
@@ -398,7 +461,6 @@ class APRSUploader(object):
self.log_info("APRS Uploader Started.")
def connect(self):
""" Connect to an APRS-IS Server """
# create socket & connect to server
@@ -407,36 +469,42 @@ class APRSUploader(object):
try:
self.aprsis_socket.connect((self.aprsis_host, self.aprsis_port))
# Send logon string
#_logon = 'user %s pass %s vers VK5QI-AutoRX filter b/%s \r\n' % (self.aprs_callsign, self.aprs_passcode, self.aprs_callsign)
_logon = 'user %s pass %s vers VK5QI-AutoRX\r\n' % (self.aprs_callsign, self.aprs_passcode)
# _logon = 'user %s pass %s vers VK5QI-AutoRX filter b/%s \r\n' % (self.aprs_callsign, self.aprs_passcode, self.aprs_callsign)
_logon = "user %s pass %s vers VK5QI-AutoRX\r\n" % (
self.aprs_callsign,
self.aprs_passcode,
)
self.log_debug("Logging in: %s" % _logon)
self.aprsis_socket.sendall(_logon.encode('ascii'))
self.aprsis_socket.sendall(_logon.encode("ascii"))
# Set packet filters to limit inbound bandwidth.
_filter = '#filter p/ZZ\r\n'
_filter = "#filter p/ZZ\r\n"
self.log_debug("Setting Filter: %s" % _filter)
self.aprsis_socket.sendall(_filter.encode('ascii'))
_filter = '#filter -t/po\r\n'
self.aprsis_socket.sendall(_filter.encode("ascii"))
_filter = "#filter -t/po\r\n"
self.log_debug("Setting Filter: %s" % _filter)
self.aprsis_socket.sendall(_filter.encode('ascii'))
self.aprsis_socket.sendall(_filter.encode("ascii"))
# Wait for login to complete.
time.sleep(1)
# Check response
_resp = self.aprsis_socket.recv(1024)
try:
_resp = _resp.decode('ascii').strip()
_resp = _resp.decode("ascii").strip()
except:
print(_resp)
if _resp[0] != '#':
if _resp[0] != "#":
raise IOError("Invalid response from APRS-IS Server: %s" % _resp)
else:
self.log_debug("Server Logon Response: %s" % str(_resp))
self.log_info("Connected to APRS-IS server %s:%d" % (self.aprsis_host, self.aprsis_port))
self.log_info(
"Connected to APRS-IS server %s:%d"
% (self.aprsis_host, self.aprsis_port)
)
self.aprsis_lastconnect = time.time()
return True
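Not part of this diff, but useful context for the logon string above: the passcode sent alongside the callsign in `user ... pass ...` is derived with the well-known, publicly documented APRS-IS hash, sketched here for reference:

def aprs_passcode(callsign):
    # Standard APRS-IS passcode hash (public algorithm, reference sketch only).
    callsign = callsign.split("-")[0].upper()
    code = 0x73E2
    for i, char in enumerate(callsign):
        if i % 2 == 0:
            code ^= ord(char) << 8
        else:
            code ^= ord(char)
    return code & 0x7FFF

print(aprs_passcode("N0CALL"))  # compare against a known-good passcode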
@@ -445,7 +513,6 @@ class APRSUploader(object):
self.aprsis_socket = None
return False
def flush_rx(self):
""" Flush the APRS-IS RX buffer """
try:
@@ -456,8 +523,7 @@ class APRSUploader(object):
except:
# Ignore any exceptions from attempting to read the buffer.
pass
def aprsis_upload(self, source, packet, igate=False, retries=5):
""" Upload a packet to APRS-IS
@@ -473,7 +539,7 @@ class APRSUploader(object):
if self.inhibit:
self.log_info("Upload Inhibited: %s" % packet)
return True
self.aprsis_upload_lock.acquire()
# If we have not connected in a long time, reset the APRS-IS connection.
@@ -487,10 +553,14 @@ class APRSUploader(object):
# If we are emulating an IGATE, then we need to add in a path, a q-construct, and our own callsign.
# We have the TOCALL field 'APRARX' allocated by Bob WB4APR, so we can now use this to indicate
# that these packets have arrived via radiosonde_auto_rx!
_packet = '%s>APRARX,SONDEGATE,TCPIP,qAR,%s:%s\r\n' % (source, self.aprs_callsign, packet)
_packet = "%s>APRARX,SONDEGATE,TCPIP,qAR,%s:%s\r\n" % (
source,
self.aprs_callsign,
packet,
)
else:
# Otherwise, we are probably just placing an object, usually sourced by our own callsign
_packet = '%s>APRS:%s\r\n' % (source, packet)
_packet = "%s>APRS:%s\r\n" % (source, packet)
_attempts = 1
while _attempts < retries:
@@ -499,16 +569,16 @@ class APRSUploader(object):
# This will trigger a reconnect.
if self.aprsis_socket is None:
raise IOError("Socket not connected.")
# Attempt to send the packet.
# This will timeout if the socket is locked up.
self.aprsis_socket.sendall(_packet.encode('ascii'))
self.aprsis_socket.sendall(_packet.encode("ascii"))
# If OK, return.
self.log_info("Uploaded to APRS-IS: %s" % str(_packet).strip())
self.aprsis_upload_lock.release()
return True
except Exception as e:
# If something broke, forcibly shutdown the socket, then reconnect.
self.log_error("Upload Error: %s" % str(e))
@@ -519,11 +589,10 @@ class APRSUploader(object):
self.connect()
_attempts += 1
# If we end up here, something has really broken.
self.aprsis_upload_lock.release()
return False
def disconnect(self):
""" Close APRS-IS connection """
@@ -533,39 +602,39 @@ class APRSUploader(object):
except Exception as e:
self.log_error("Disconnection from APRS-IS Failed - %s" % str(e))
def beacon_station_position(self):
''' Send a station position beacon into APRS-IS '''
if self.station_beacon['enabled']:
if (self.station_beacon['position'][0] == 0.0) and (self.station_beacon['position'][1] == 0.0):
self.log_error("Station position is 0,0, not uploading position beacon.")
""" Send a station position beacon into APRS-IS """
if self.station_beacon["enabled"]:
if (self.station_beacon["position"][0] == 0.0) and (
self.station_beacon["position"][1] == 0.0
):
self.log_error(
"Station position is 0,0, not uploading position beacon."
)
self.last_user_position_upload = time.time()
return
# Generate the station position packet
# Note - this is now generated as an APRS position report, for radiosondy.info compatibility.
_packet = generate_station_object(self.aprs_callsign,
self.station_beacon['position'][0],
self.station_beacon['position'][1],
self.station_beacon['comment'],
self.station_beacon['icon'],
position_report=True)
_packet = generate_station_object(
self.aprs_callsign,
self.station_beacon["position"][0],
self.station_beacon["position"][1],
self.station_beacon["comment"],
self.station_beacon["icon"],
position_report=True,
)
# Send the packet as an iGated packet.
self.aprsis_upload(self.aprs_callsign, _packet, igate=True)
self.last_user_position_upload = time.time()
def update_station_position(self, lat, lon, alt):
""" Update the internal station position record. Used when determining the station position by GPSD """
self.station_beacon['position'] = (lat, lon, alt)
self.station_beacon["position"] = (lat, lon, alt)
def aprs_upload_thread(self):
''' Handle uploading of packets to APRS '''
""" Handle uploading of packets to APRS """
self.log_debug("Started APRS Uploader Thread.")
@@ -577,19 +646,25 @@ class APRSUploader(object):
while not self.aprs_upload_queue.empty():
_telem = self.aprs_upload_queue.get()
self.log_warning("Uploader queue was full - possible connectivity issue.")
self.log_warning(
"Uploader queue was full - possible connectivity issue."
)
else:
# Otherwise, get the first item in the queue.
_telem = self.aprs_upload_queue.get()
# Convert to a packet.
try:
(_packet, _call) = telemetry_to_aprs_position(_telem,
(_packet, _call) = telemetry_to_aprs_position(
_telem,
object_name=self.object_name_override,
aprs_comment = self.object_comment,
position_report=self.position_report)
aprs_comment=self.object_comment,
position_report=self.position_report,
)
except Exception as e:
self.log_error("Error converting telemetry to APRS packet - %s" % str(e))
self.log_error(
"Error converting telemetry to APRS packet - %s" % str(e)
)
_packet = None
# Attempt to upload it.
@@ -599,9 +674,9 @@ class APRSUploader(object):
# usually based on the sonde serial number, and we iGate the position report.
# Otherwise, we upload APRS Objects, sourced by our own callsign, but still iGated via us.
if self.position_report:
self.aprsis_upload(_call,_packet,igate=True)
self.aprsis_upload(_call, _packet, igate=True)
else:
self.aprsis_upload(self.aprs_callsign,_packet,igate=True)
self.aprsis_upload(self.aprs_callsign, _packet, igate=True)
else:
# Wait for a short time before checking the queue again.
@@ -609,28 +684,28 @@ class APRSUploader(object):
self.log_debug("Stopped APRS Uploader Thread.")
def upload_timer(self):
""" Add packets to the aprs upload queue if it is time for us to upload. """
while self.timer_thread_running:
if int(time.time()) % self.synchronous_upload_time == 0:
# Time to upload!
for _id in self.observed_payloads.keys():
# If no data, continue...
if self.observed_payloads[_id]['data'].empty():
if self.observed_payloads[_id]["data"].empty():
continue
else:
# Otherwise, dump the queue and keep the latest telemetry.
while not self.observed_payloads[_id]['data'].empty():
_telem = self.observed_payloads[_id]['data'].get()
while not self.observed_payloads[_id]["data"].empty():
_telem = self.observed_payloads[_id]["data"].get()
# Attempt to add it to the APRS upload queue.
try:
self.aprs_upload_queue.put_nowait(_telem)
except Exception as e:
self.log_error("Error adding sentence to queue: %s" % str(e))
self.log_error(
"Error adding sentence to queue: %s" % str(e)
)
# Sleep a second so we don't hit the synchronous upload time again.
time.sleep(1)
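The timing scheme above deserves a note: uploads fire whenever the wall clock is an exact multiple of synchronous_upload_time, so every station configured with the same interval uploads in the same second, and the one-second sleep stops the same slot from firing twice. Reduced to its core:

import time

SYNC_PERIOD = 30  # seconds, i.e. synchronous_upload_time

while True:
    if int(time.time()) % SYNC_PERIOD == 0:
        # ... drain per-payload queues and enqueue uploads here ...
        time.sleep(1)    # step past this second so we trigger only once
    else:
        time.sleep(0.1)  # poll until the next synchronized second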
@@ -641,7 +716,6 @@ class APRSUploader(object):
# Not yet time to upload, wait for a bit.
time.sleep(0.1)
def process_queue(self):
""" Process packets from the input queue.
@@ -656,34 +730,41 @@ class APRSUploader(object):
# Grab latest telem dictionary.
_telem = self.input_queue.get_nowait()
_id = _telem['id']
_id = _telem["id"]
if _id not in self.observed_payloads:
# We haven't seen this ID before, so create a new dictionary entry for it.
self.observed_payloads[_id] = {'count':1, 'data':Queue()}
self.log_debug("New Payload %s. Not observed enough to allow upload." % _id)
self.observed_payloads[_id] = {"count": 1, "data": Queue()}
self.log_debug(
"New Payload %s. Not observed enough to allow upload." % _id
)
# However, we don't yet add anything to the queue for this payload...
else:
# We have seen this payload before!
# Increment the 'seen' counter.
self.observed_payloads[_id]['count'] += 1
self.observed_payloads[_id]["count"] += 1
# If we have seen this particular ID enough times, add the data to the ID's queue.
if self.observed_payloads[_id]['count'] >= self.callsign_validity_threshold:
if (
self.observed_payloads[_id]["count"]
>= self.callsign_validity_threshold
):
# Add the telemetry to the queue
self.observed_payloads[_id]['data'].put(_telem)
self.observed_payloads[_id]["data"].put(_telem)
else:
self.log_debug("Payload ID %s not observed enough to allow upload." % _id)
self.log_debug(
"Payload ID %s not observed enough to allow upload." % _id
)
if (time.time() - self.last_user_position_upload) > self.station_beacon['rate']*60:
if (time.time() - self.last_user_position_upload) > self.station_beacon[
"rate"
] * 60:
if self.aprsis_socket != None:
self.beacon_station_position()
time.sleep(0.1)
def add(self, telemetry):
""" Add a dictionary of telemetry to the input queue.
@@ -693,8 +774,8 @@ class APRSUploader(object):
"""
# Discard any telemetry which is indicated to be encrypted.
if 'encrypted' in telemetry:
if telemetry['encrypted'] == True:
if "encrypted" in telemetry:
if telemetry["encrypted"] == True:
return
# Check the telemetry dictionary contains the required fields.
@@ -709,10 +790,8 @@ class APRSUploader(object):
else:
self.log_error("Processing not running, discarding.")
def close(self):
''' Shutdown uploader and processing threads. '''
""" Shutdown uploader and processing threads. """
self.log_debug("Waiting for threads to close...")
self.input_processing_running = False
self.timer_thread_running = False
@@ -730,7 +809,6 @@ class APRSUploader(object):
if self.input_thread is not None:
self.input_thread.join()
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@@ -738,7 +816,6 @@ class APRSUploader(object):
"""
logging.debug("APRS-IS - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@@ -746,7 +823,6 @@ class APRSUploader(object):
"""
logging.info("APRS-IS - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
@@ -754,7 +830,6 @@ class APRSUploader(object):
"""
logging.error("APRS-IS - %s" % line)
def log_warning(self, line):
""" Helper function to log a warning message with a descriptive heading.
Args:
@@ -763,36 +838,112 @@
logging.warning("APRS-IS - %s" % line)
if __name__ == "__main__":
# Some unit tests for the APRS packet generation code.
# ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'type', 'freq', 'freq_float', 'datetime_dt']
test_telem = [
# These types of DFM serial IDs are deprecated
#{'id':'DFM06-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
#{'id':'DFM09-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
#{'id':'DFM15-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
#{'id':'DFM17-12345678', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
{'id':'DFM-19123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'humidity':1.0, 'pressure':1000.0, 'batt':3.0, 'type':'DFM17', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
{'id':'DFM-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'humidity':1.0, 'pressure':1000.0, 'batt':3.0, 'type':'DFM06', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
{'id':'N1234567', 'frame':10, 'lat':-10.00001, 'lon':9.99999999, 'alt':10000, 'temp':1.0, 'humidity':1.0, 'pressure':1000.0, 'batt':3.0, 'type':'RS41', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
{'id':'M1234567', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'humidity':1.0, 'pressure':1000.0, 'batt':3.0, 'type':'RS92', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
]
# {'id':'DFM06-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
# {'id':'DFM09-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
# {'id':'DFM15-123456', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
# {'id':'DFM17-12345678', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'DFM', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()},
{
"id": "DFM-19123456",
"frame": 10,
"lat": -10.0,
"lon": 10.0,
"alt": 10000,
"temp": 1.0,
"humidity": 1.0,
"pressure": 1000.0,
"batt": 3.0,
"type": "DFM17",
"freq": "401.520 MHz",
"freq_float": 401.52,
"heading": 0.0,
"vel_h": 5.1,
"vel_v": -5.0,
"datetime_dt": datetime.datetime.utcnow(),
},
{
"id": "DFM-123456",
"frame": 10,
"lat": -10.0,
"lon": 10.0,
"alt": 10000,
"temp": 1.0,
"humidity": 1.0,
"pressure": 1000.0,
"batt": 3.0,
"type": "DFM06",
"freq": "401.520 MHz",
"freq_float": 401.52,
"heading": 0.0,
"vel_h": 5.1,
"vel_v": -5.0,
"datetime_dt": datetime.datetime.utcnow(),
},
{
"id": "N1234567",
"frame": 10,
"lat": -10.00001,
"lon": 9.99999999,
"alt": 10000,
"temp": 1.0,
"humidity": 1.0,
"pressure": 1000.0,
"batt": 3.0,
"type": "RS41",
"freq": "401.520 MHz",
"freq_float": 401.52,
"heading": 0.0,
"vel_h": 5.1,
"vel_v": -5.0,
"datetime_dt": datetime.datetime.utcnow(),
},
{
"id": "M1234567",
"frame": 10,
"lat": -10.0,
"lon": 10.0,
"alt": 10000,
"temp": 1.0,
"humidity": 1.0,
"pressure": 1000.0,
"batt": 3.0,
"type": "RS92",
"freq": "401.520 MHz",
"freq_float": 401.52,
"heading": 0.0,
"vel_h": 5.1,
"vel_v": -5.0,
"datetime_dt": datetime.datetime.utcnow(),
},
]
comment_field = "Clb=<vel_v> t=<temp> <freq> Type=<type> Radiosonde http://bit.ly/2Bj4Sfk"
comment_field = (
"Clb=<vel_v> t=<temp> <freq> Type=<type> Radiosonde http://bit.ly/2Bj4Sfk"
)
for _telem in test_telem:
out_str = telemetry_to_aprs_position(_telem, object_name="<id>", aprs_comment=comment_field, position_report=False)
out_str = telemetry_to_aprs_position(
_telem,
object_name="<id>",
aprs_comment=comment_field,
position_report=False,
)
print(out_str)
# APRS Testing
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
test = APRSUploader(aprs_callsign="VK5QI", aprs_passcode="23032", aprsis_host="radiosondy.info")
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
test = APRSUploader(
aprs_callsign="VK5QI", aprs_passcode="23032", aprsis_host="radiosondy.info"
)
test.connect()
time.sleep(5)
test.disconnect()
test.close()

View file

@@ -14,7 +14,13 @@ import json
from .utils import rtlsdr_test
# Dummy initial config with some parameters we need to make the web interface happy.
global_config = {'min_freq':400.0,'max_freq':403.0,'snr_threshold':10,'station_lat':0.0,'station_lon':0.0}
global_config = {
"min_freq": 400.0,
"max_freq": 403.0,
"snr_threshold": 10,
"station_lat": 0.0,
"station_lon": 0.0,
}
try:
# Python 2
@@ -29,8 +35,9 @@ except ImportError:
MINIMUM_APRS_UPDATE_RATE = 30
MINIMUM_HABITAT_UPDATE_RATE = 30
def read_auto_rx_config(filename, no_sdr_test=False):
""" Read an Auto-RX v2 Station Configuration File.
""" Read an Auto-RX v2 Station Configuration File.
This function will attempt to parse a configuration file.
It will also confirm the accessibility of any SDRs specified in the config file.
@@ -43,384 +50,519 @@ def read_auto_rx_config(filename, no_sdr_test=False):
auto_rx_config (dict): The configuration dictionary.
sdr_config (dict): A dictionary with SDR parameters.
"""
global global_config
# Configuration Defaults:
auto_rx_config = {
# Log Settings
'per_sonde_log' : True,
global global_config
# Configuration Defaults:
auto_rx_config = {
# Log Settings
"per_sonde_log": True,
# Email Settings
'email_enabled': False,
'email_error_notifications': False,
'email_smtp_server': 'localhost',
'email_smtp_port': 25,
'email_smtp_authentication': 'None',
'email_smtp_login': 'None',
'email_smtp_password': 'None',
'email_from': 'sonde@localhost',
'email_to': None,
'email_subject': "<type> Sonde launch detected on <freq>: <id>",
# SDR Settings
'sdr_fm': 'rtl_fm',
'sdr_power': 'rtl_power',
'sdr_quantity': 1,
# Search Parameters
'min_freq' : 400.4,
'max_freq' : 404.0,
'rx_timeout' : 120,
'whitelist' : [],
'blacklist' : [],
'greylist' : [],
# Location Settings
'station_lat' : 0.0,
'station_lon' : 0.0,
'station_alt' : 0.0,
'station_code' : 'SONDE', # NOTE: This will not be read from the config file, but will be left in place for now
# as a default setting.
'gpsd_enabled' : False,
'gpsd_host' : 'localhost',
'gpsd_port' : 2947,
# Position Filter Settings
'max_altitude' : 50000,
'max_radius_km' : 1000,
'min_radius_km' : 0,
'radius_temporary_block': False,
# Habitat Settings
'habitat_enabled': False,
'habitat_upload_rate': 30,
'habitat_uploader_callsign': 'SONDE_AUTO_RX',
'habitat_uploader_antenna': '1/4-wave',
'habitat_upload_listener_position': False,
'habitat_payload_callsign': '<id>',
# APRS Settings
'aprs_enabled' : False,
'aprs_upload_rate': 30,
'aprs_user' : 'N0CALL',
'aprs_pass' : '00000',
'aprs_server' : 'rotate.aprs2.net',
'aprs_object_id': '<id>',
'aprs_use_custom_object_id': False,
'aprs_custom_comment': 'Radiosonde Auto-RX <freq>',
'aprs_position_report': False,
'station_beacon_enabled': False,
'station_beacon_rate': 30,
'station_beacon_comment': "radiosonde_auto_rx SondeGate v<version>",
'station_beacon_icon': '/r',
# Web Settings,
'web_host' : '0.0.0.0',
'web_port' : 5000,
'web_archive_age': 120,
'web_control': True,
# Advanced Parameters
'search_step' : 800,
'snr_threshold' : 10,
'min_distance' : 1000,
'dwell_time' : 10,
'max_peaks' : 10,
'quantization' : 10000,
'decoder_spacing_limit': 15000,
'synchronous_upload' : False,
'scan_dwell_time' : 20,
'detect_dwell_time' : 5,
'scan_delay' : 10,
'payload_id_valid' : 5,
'temporary_block_time' : 60,
'rs41_drift_tweak': False,
'decoder_stats': False,
'ngp_tweak': False,
# Rotator Settings
'enable_rotator': False,
'rotator_update_rate': 30,
'rotator_hostname': '127.0.0.1',
'rotator_port' : 4533,
'rotation_threshold': 5.0,
'rotator_homing_enabled': False,
'rotator_homing_delay': 10,
'rotator_home_azimuth': 0,
'rotator_home_elevation': 0,
# OziExplorer Settings
'ozi_enabled' : False,
'ozi_update_rate': 5,
'ozi_port' : 55681,
'payload_summary_enabled': False,
'payload_summary_port' : 55672,
# Debugging settings
'save_detection_audio' : False,
'save_decode_audio' : False,
'save_decode_iq' : False,
# URL for the Habitat DB Server.
# As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
# from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
# For now, sondehub.org just acts as a proxy to habhub.org.
# This setting is not exposed to users as it's only used for unit/int testing
'habitat_url': "https://habitat.sondehub.org/"
"email_enabled": False,
#'email_error_notifications': False,
"email_smtp_server": "localhost",
"email_smtp_port": 25,
"email_smtp_authentication": "None",
"email_smtp_login": "None",
"email_smtp_password": "None",
"email_from": "sonde@localhost",
"email_to": None,
"email_subject": "<type> Sonde launch detected on <freq>: <id>",
# SDR Settings
"sdr_fm": "rtl_fm",
"sdr_power": "rtl_power",
"sdr_quantity": 1,
# Search Parameters
"min_freq": 400.4,
"max_freq": 404.0,
"rx_timeout": 120,
"whitelist": [],
"blacklist": [],
"greylist": [],
# Location Settings
"station_lat": 0.0,
"station_lon": 0.0,
"station_alt": 0.0,
"station_code": "SONDE", # NOTE: This will not be read from the config file, but will be left in place for now
# as a default setting.
"gpsd_enabled": False,
"gpsd_host": "localhost",
"gpsd_port": 2947,
# Position Filter Settings
"max_altitude": 50000,
"max_radius_km": 1000,
"min_radius_km": 0,
"radius_temporary_block": False,
# Habitat Settings
"habitat_enabled": False,
"habitat_upload_rate": 30,
"habitat_uploader_callsign": "SONDE_AUTO_RX",
"habitat_uploader_antenna": "1/4-wave",
"habitat_upload_listener_position": False,
"habitat_payload_callsign": "<id>",
# APRS Settings
"aprs_enabled": False,
"aprs_upload_rate": 30,
"aprs_user": "N0CALL",
"aprs_pass": "00000",
"aprs_server": "rotate.aprs2.net",
"aprs_object_id": "<id>",
#'aprs_use_custom_object_id': False,
"aprs_custom_comment": "Radiosonde Auto-RX <freq>",
"aprs_position_report": False,
"station_beacon_enabled": False,
"station_beacon_rate": 30,
"station_beacon_comment": "radiosonde_auto_rx SondeGate v<version>",
"station_beacon_icon": "/r",
# Web Settings,
"web_host": "0.0.0.0",
"web_port": 5000,
"web_archive_age": 120,
"web_control": True,
#'kml_refresh_rate': 10,
# Advanced Parameters
"search_step": 800,
"snr_threshold": 10,
"min_distance": 1000,
"dwell_time": 10,
"max_peaks": 10,
"quantization": 10000,
"decoder_spacing_limit": 15000,
"synchronous_upload": False,
"scan_dwell_time": 20,
"detect_dwell_time": 5,
"scan_delay": 10,
"payload_id_valid": 5,
"temporary_block_time": 60,
"rs41_drift_tweak": False,
"decoder_stats": False,
"ngp_tweak": False,
# Rotator Settings
"enable_rotator": False,
"rotator_update_rate": 30,
"rotator_hostname": "127.0.0.1",
"rotator_port": 4533,
"rotation_threshold": 5.0,
"rotator_homing_enabled": False,
"rotator_homing_delay": 10,
"rotator_home_azimuth": 0,
"rotator_home_elevation": 0,
# OziExplorer Settings
"ozi_enabled": False,
"ozi_update_rate": 5,
"ozi_port": 55681,
"payload_summary_enabled": False,
"payload_summary_port": 55672,
# Debugging settings
"save_detection_audio": False,
"save_decode_audio": False,
"save_decode_iq": False,
# URL for the Habitat DB Server.
# As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
# from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
# For now, sondehub.org just acts as a proxy to habhub.org.
# This setting is not exposed to users as it's only used for unit/int testing
"habitat_url": "https://habitat.sondehub.org/",
}
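A subtlety worth flagging: this defaults dictionary is passed straight into RawConfigParser below, which places it in the parser's DEFAULT section, so options missing from station.cfg silently fall back to the values above. A minimal illustration (option name is an illustrative subset):

from configparser import RawConfigParser

defaults = {"web_port": "5000"}
config = RawConfigParser(defaults)
config.read_string("[web]\nweb_host = 0.0.0.0\n")
print(config.getint("web", "web_port"))  # -> 5000, served from the defaults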
try:
# Check the file exists.
if not os.path.isfile(filename):
logging.critical("Config file %s does not exist!" % filename)
return None
config = RawConfigParser(auto_rx_config)
config.read(filename)
# Log Settings
auto_rx_config["per_sonde_log"] = config.getboolean("logging", "per_sonde_log")
# Email Settings
if config.has_option("email", "email_enabled"):
try:
auto_rx_config["email_enabled"] = config.getboolean(
"email", "email_enabled"
)
auto_rx_config["email_smtp_server"] = config.get("email", "smtp_server")
auto_rx_config["email_smtp_port"] = config.get("email", "smtp_port")
auto_rx_config["email_smtp_authentication"] = config.get(
"email", "smtp_authentication"
)
auto_rx_config["email_smtp_login"] = config.get("email", "smtp_login")
auto_rx_config["email_smtp_password"] = config.get(
"email", "smtp_password"
)
auto_rx_config["email_from"] = config.get("email", "from")
auto_rx_config["email_to"] = config.get("email", "to")
auto_rx_config["email_subject"] = config.get("email", "subject")
if auto_rx_config["email_smtp_authentication"] not in [
"None",
"TLS",
"SSL",
]:
logging.error(
"Config - Invalid email authentication setting. Must be None, TLS or SSL."
)
return None
except:
logging.error("Config - Invalid or missing email settings. Disabling.")
auto_rx_config["email_enabled"] = False
# SDR Settings
auto_rx_config["sdr_fm"] = config.get("advanced", "sdr_fm_path")
auto_rx_config["sdr_power"] = config.get("advanced", "sdr_power_path")
auto_rx_config["sdr_quantity"] = config.getint("sdr", "sdr_quantity")
# Search Parameters
auto_rx_config["min_freq"] = config.getfloat("search_params", "min_freq")
auto_rx_config["max_freq"] = config.getfloat("search_params", "max_freq")
auto_rx_config["rx_timeout"] = config.getint("search_params", "rx_timeout")
auto_rx_config["whitelist"] = json.loads(
config.get("search_params", "whitelist")
)
auto_rx_config["blacklist"] = json.loads(
config.get("search_params", "blacklist")
)
auto_rx_config["greylist"] = json.loads(config.get("search_params", "greylist"))
# Location Settings
auto_rx_config["station_lat"] = config.getfloat("location", "station_lat")
auto_rx_config["station_lon"] = config.getfloat("location", "station_lon")
auto_rx_config["station_alt"] = config.getfloat("location", "station_alt")
# Position Filtering
auto_rx_config["max_altitude"] = config.getint("filtering", "max_altitude")
auto_rx_config["max_radius_km"] = config.getint("filtering", "max_radius_km")
# Habitat Settings
auto_rx_config["habitat_enabled"] = config.getboolean(
"habitat", "habitat_enabled"
)
auto_rx_config["habitat_upload_rate"] = config.getint("habitat", "upload_rate")
auto_rx_config["habitat_uploader_callsign"] = config.get(
"habitat", "uploader_callsign"
)
auto_rx_config["habitat_upload_listener_position"] = config.getboolean(
"habitat", "upload_listener_position"
)
auto_rx_config["habitat_uploader_antenna"] = config.get(
"habitat", "uploader_antenna"
).strip()
try: # Use the default configuration if not found
auto_rx_config["habitat_url"] = config.get("habitat", "url")
except:
pass
if auto_rx_config["habitat_upload_rate"] < MINIMUM_HABITAT_UPDATE_RATE:
logging.warning(
"Config - Habitat Update Rate clipped to minimum of %d seconds. Please be respectful of other users of Habitat."
% MINIMUM_HABITAT_UPDATE_RATE
)
auto_rx_config["habitat_upload_rate"] = MINIMUM_HABITAT_UPDATE_RATE
# APRS Settings
auto_rx_config["aprs_enabled"] = config.getboolean("aprs", "aprs_enabled")
auto_rx_config["aprs_upload_rate"] = config.getint("aprs", "upload_rate")
auto_rx_config["aprs_user"] = config.get("aprs", "aprs_user")
auto_rx_config["aprs_pass"] = config.get("aprs", "aprs_pass")
auto_rx_config["aprs_server"] = config.get("aprs", "aprs_server")
auto_rx_config["aprs_object_id"] = config.get("aprs", "aprs_object_id")
auto_rx_config["aprs_custom_comment"] = config.get(
"aprs", "aprs_custom_comment"
)
auto_rx_config["aprs_position_report"] = config.getboolean(
"aprs", "aprs_position_report"
)
auto_rx_config["station_beacon_enabled"] = config.getboolean(
"aprs", "station_beacon_enabled"
)
auto_rx_config["station_beacon_rate"] = config.getint(
"aprs", "station_beacon_rate"
)
auto_rx_config["station_beacon_comment"] = config.get(
"aprs", "station_beacon_comment"
)
auto_rx_config["station_beacon_icon"] = config.get(
"aprs", "station_beacon_icon"
)
if auto_rx_config["aprs_upload_rate"] < MINIMUM_APRS_UPDATE_RATE:
logging.warning(
"Config - APRS Update Rate clipped to minimum of %d seconds. Please be respectful of other users of APRS-IS."
% MINIMUM_APRS_UPDATE_RATE
)
auto_rx_config["aprs_upload_rate"] = MINIMUM_APRS_UPDATE_RATE
# OziPlotter Settings
auto_rx_config["ozi_enabled"] = config.getboolean("oziplotter", "ozi_enabled")
auto_rx_config["ozi_update_rate"] = config.getint(
"oziplotter", "ozi_update_rate"
)
auto_rx_config["ozi_port"] = config.getint("oziplotter", "ozi_port")
auto_rx_config["payload_summary_enabled"] = config.getboolean(
"oziplotter", "payload_summary_enabled"
)
auto_rx_config["payload_summary_port"] = config.getint(
"oziplotter", "payload_summary_port"
)
# Advanced Settings
auto_rx_config["search_step"] = config.getfloat("advanced", "search_step")
auto_rx_config["snr_threshold"] = config.getfloat("advanced", "snr_threshold")
auto_rx_config["min_distance"] = config.getfloat("advanced", "min_distance")
auto_rx_config["dwell_time"] = config.getint("advanced", "dwell_time")
auto_rx_config["quantization"] = config.getint("advanced", "quantization")
auto_rx_config["max_peaks"] = config.getint("advanced", "max_peaks")
auto_rx_config["scan_dwell_time"] = config.getint("advanced", "scan_dwell_time")
auto_rx_config["detect_dwell_time"] = config.getint(
"advanced", "detect_dwell_time"
)
auto_rx_config["scan_delay"] = config.getint("advanced", "scan_delay")
auto_rx_config["payload_id_valid"] = config.getint(
"advanced", "payload_id_valid"
)
auto_rx_config["synchronous_upload"] = config.getboolean(
"advanced", "synchronous_upload"
)
# Rotator Settings
auto_rx_config["rotator_enabled"] = config.getboolean(
"rotator", "rotator_enabled"
)
auto_rx_config["rotator_update_rate"] = config.getint("rotator", "update_rate")
auto_rx_config["rotator_hostname"] = config.get("rotator", "rotator_hostname")
auto_rx_config["rotator_port"] = config.getint("rotator", "rotator_port")
auto_rx_config["rotator_homing_enabled"] = config.getboolean(
"rotator", "rotator_homing_enabled"
)
auto_rx_config["rotator_home_azimuth"] = config.getfloat(
"rotator", "rotator_home_azimuth"
)
auto_rx_config["rotator_home_elevation"] = config.getfloat(
"rotator", "rotator_home_elevation"
)
auto_rx_config["rotator_homing_delay"] = config.getint(
"rotator", "rotator_homing_delay"
)
auto_rx_config["rotation_threshold"] = config.getfloat(
"rotator", "rotation_threshold"
)
# Web interface settings.
auto_rx_config["web_host"] = config.get("web", "web_host")
auto_rx_config["web_port"] = config.getint("web", "web_port")
auto_rx_config["web_archive_age"] = config.getint("web", "archive_age")
auto_rx_config["save_detection_audio"] = config.getboolean(
"debugging", "save_detection_audio"
)
auto_rx_config["save_decode_audio"] = config.getboolean(
"debugging", "save_decode_audio"
)
auto_rx_config["save_decode_iq"] = config.getboolean(
"debugging", "save_decode_iq"
)
# NOTE 2019-09-21: The station code will now be fixed at the default to avoid multiple iMet callsign issues.
# auto_rx_config['station_code'] = config.get('location', 'station_code')
# if len(auto_rx_config['station_code']) > 5:
# auto_rx_config['station_code'] = auto_rx_config['station_code'][:5]
# logging.warning("Config - Clipped station code to 5 digits: %s" % auto_rx_config['station_code'])
auto_rx_config["temporary_block_time"] = config.getint(
"advanced", "temporary_block_time"
)
# New demod tweaks - Added 2019-04-23
# Default to all experimental decoders on.
auto_rx_config["experimental_decoders"] = {
"RS41": True,
"RS92": True,
"DFM": True,
"M10": True,
"M20": True,
"IMET": False,
"LMS6": True,
"MK2LMS": False,
"MEISEI": False,
"UDP": False,
}
auto_rx_config["rs41_drift_tweak"] = config.getboolean(
"advanced", "drift_tweak"
)
auto_rx_config["decoder_spacing_limit"] = config.getint(
"advanced", "decoder_spacing_limit"
)
auto_rx_config["experimental_decoders"]["RS41"] = config.getboolean(
"advanced", "rs41_experimental"
)
auto_rx_config["experimental_decoders"]["RS92"] = config.getboolean(
"advanced", "rs92_experimental"
)
auto_rx_config["experimental_decoders"]["M10"] = config.getboolean(
"advanced", "m10_experimental"
)
auto_rx_config["experimental_decoders"]["DFM"] = config.getboolean(
"advanced", "dfm_experimental"
)
auto_rx_config["experimental_decoders"]["LMS6"] = config.getboolean(
"advanced", "lms6-400_experimental"
)
try:
auto_rx_config["web_control"] = config.getboolean("web", "web_control")
auto_rx_config["ngp_tweak"] = config.getboolean("advanced", "ngp_tweak")
auto_rx_config["gpsd_enabled"] = config.getboolean(
"location", "gpsd_enabled"
)
auto_rx_config["gpsd_host"] = config.get("location", "gpsd_host")
auto_rx_config["gpsd_port"] = config.getint("location", "gpsd_port")
except:
logging.warning(
"Config - Did not find web control / ngp_tweak / gpsd options, using defaults (disabled)"
)
auto_rx_config["web_control"] = False
auto_rx_config["ngp_tweak"] = False
auto_rx_config["gpsd_enabled"] = False
try:
auto_rx_config["min_radius_km"] = config.getint(
"filtering", "min_radius_km"
)
auto_rx_config["radius_temporary_block"] = config.getboolean(
"filtering", "radius_temporary_block"
)
except:
logging.warning(
"Config - Did not find minimum radius filter setting, using default (0km)."
)
auto_rx_config["min_radius_km"] = 0
auto_rx_config["radius_temporary_block"] = False
try:
auto_rx_config["aprs_use_custom_object_id"] = config.getboolean(
"aprs", "aprs_use_custom_object_id"
)
except:
logging.warning(
"Config - Did not find aprs_use_custom_object_id setting, using default (False)"
)
auto_rx_config["aprs_use_custom_object_id"] = False
try:
auto_rx_config["email_error_notifications"] = config.getboolean(
"email", "email_error_notifications"
)
except:
logging.warning(
"Config - Did not find email_error_notifications setting, using default (False)"
)
auto_rx_config["email_error_notifications"] = False
try:
auto_rx_config["kml_refresh_rate"] = config.getint(
"web", "kml_refresh_rate"
)
except:
logging.warning(
"Config - Did not find kml_refresh_rate setting, using default (10 seconds)."
)
auto_rx_config["kml_refresh_rate"] = 11
# If we are being called as part of a unit test, just return the config now.
if no_sdr_test:
return auto_rx_config
# Now we attempt to read in the individual SDR parameters.
auto_rx_config["sdr_settings"] = {}
for _n in range(1, auto_rx_config["sdr_quantity"] + 1):
_section = "sdr_%d" % _n
try:
_device_idx = config.get(_section, "device_idx")
_ppm = round(config.getfloat(_section, "ppm"))
_gain = config.getfloat(_section, "gain")
_bias = config.getboolean(_section, "bias")
if (auto_rx_config["sdr_quantity"] > 1) and (_device_idx == "0"):
logging.critical(
"Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!"
)
return None
# See if the SDR exists.
_sdr_valid = rtlsdr_test(_device_idx)
if _sdr_valid:
auto_rx_config["sdr_settings"][_device_idx] = {
"ppm": _ppm,
"gain": _gain,
"bias": _bias,
"in_use": False,
"task": None,
}
logging.info("Config - Tested SDR #%s OK" % _device_idx)
else:
logging.warning("Config - SDR #%s invalid." % _device_idx)
except Exception as e:
logging.error(
"Config - Error parsing SDR %d config - %s" % (_n, str(e))
)
continue
# Sanity checks when using more than one SDR
if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
auto_rx_config["aprs_object_id"] != "<id>"
):
logging.critical(
"Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!"
)
return None
if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
auto_rx_config["rotator_enabled"]
):
logging.critical(
"Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!"
)
return None
# TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
if (len(auto_rx_config["sdr_settings"].keys()) > 1) and auto_rx_config[
"ozi_enabled"
]:
logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
return None
if len(auto_rx_config["sdr_settings"].keys()) == 0:
# We have no SDRs to use!!
logging.error("Config - No working SDRs! Cannot run...")
return None
else:
# Create a global copy of the configuration file at this point
global_config = copy.deepcopy(auto_rx_config)
# Excise some sensitive parameters from the global config.
global_config.pop("email_smtp_login")
global_config.pop("email_smtp_password")
global_config.pop("email_smtp_server")
return auto_rx_config
except:
traceback.print_exc()
logging.error("Could not parse config file.")
return None
try:
if __name__ == "__main__":
""" Quick test script to attempt to read in a config file. """
import sys, pprint
# Check the file exists.
if not os.path.isfile(filename):
logging.critical("Config file %s does not exist!" % filename)
return None
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
config = RawConfigParser(auto_rx_config)
config.read(filename)
config = read_auto_rx_config(sys.argv[1])
# Log Settings
auto_rx_config['per_sonde_log'] = config.getboolean('logging', 'per_sonde_log')
# Email Settings
if config.has_option('email', 'email_enabled'):
try:
auto_rx_config['email_enabled'] = config.getboolean('email', 'email_enabled')
auto_rx_config['email_smtp_server'] = config.get('email', 'smtp_server')
auto_rx_config['email_smtp_port'] = config.get('email', 'smtp_port')
auto_rx_config['email_smtp_authentication'] = config.get('email', 'smtp_authentication')
auto_rx_config['email_smtp_login'] = config.get('email', 'smtp_login')
auto_rx_config['email_smtp_password'] = config.get('email', 'smtp_password')
auto_rx_config['email_from'] = config.get('email', 'from')
auto_rx_config['email_to'] = config.get('email', 'to')
auto_rx_config['email_subject'] = config.get('email', 'subject')
if auto_rx_config['email_smtp_authentication'] not in ['None', 'TLS', 'SSL']:
logging.error("Config - Invalid email authentication setting. Must be None, TLS or SSL.")
return None
except:
logging.error("Config - Invalid or missing email settings. Disabling.")
auto_rx_config['email_enabled'] = False
# SDR Settings
auto_rx_config['sdr_fm'] = config.get('advanced', 'sdr_fm_path')
auto_rx_config['sdr_power'] = config.get('advanced', 'sdr_power_path')
auto_rx_config['sdr_quantity'] = config.getint('sdr', 'sdr_quantity')
# Search Parameters
auto_rx_config['min_freq'] = config.getfloat('search_params', 'min_freq')
auto_rx_config['max_freq'] = config.getfloat('search_params', 'max_freq')
auto_rx_config['rx_timeout'] = config.getint('search_params', 'rx_timeout')
auto_rx_config['whitelist'] = json.loads(config.get('search_params', 'whitelist'))
auto_rx_config['blacklist'] = json.loads(config.get('search_params', 'blacklist'))
auto_rx_config['greylist'] = json.loads(config.get('search_params', 'greylist'))
# Location Settings
auto_rx_config['station_lat'] = config.getfloat('location', 'station_lat')
auto_rx_config['station_lon'] = config.getfloat('location', 'station_lon')
auto_rx_config['station_alt'] = config.getfloat('location', 'station_alt')
# Position Filtering
auto_rx_config['max_altitude'] = config.getint('filtering', 'max_altitude')
auto_rx_config['max_radius_km'] = config.getint('filtering', 'max_radius_km')
# Habitat Settings
auto_rx_config['habitat_enabled'] = config.getboolean('habitat', 'habitat_enabled')
auto_rx_config['habitat_upload_rate'] = config.getint('habitat', 'upload_rate')
auto_rx_config['habitat_uploader_callsign'] = config.get('habitat', 'uploader_callsign')
auto_rx_config['habitat_upload_listener_position'] = config.getboolean('habitat','upload_listener_position')
auto_rx_config['habitat_uploader_antenna'] = config.get('habitat', 'uploader_antenna').strip()
try: # Use the default configuration if not found
auto_rx_config['habitat_url'] = config.get('habitat','url')
except:
pass
if auto_rx_config['habitat_upload_rate'] < MINIMUM_HABITAT_UPDATE_RATE:
logging.warning("Config - Habitat Update Rate clipped to minimum of %d seconds. Please be respectful of other users of Habitat." % MINIMUM_HABITAT_UPDATE_RATE)
auto_rx_config['habitat_upload_rate'] = MINIMUM_HABITAT_UPDATE_RATE
# APRS Settings
auto_rx_config['aprs_enabled'] = config.getboolean('aprs', 'aprs_enabled')
auto_rx_config['aprs_upload_rate'] = config.getint('aprs', 'upload_rate')
auto_rx_config['aprs_user'] = config.get('aprs', 'aprs_user')
auto_rx_config['aprs_pass'] = config.get('aprs', 'aprs_pass')
auto_rx_config['aprs_server'] = config.get('aprs', 'aprs_server')
auto_rx_config['aprs_object_id'] = config.get('aprs', 'aprs_object_id')
auto_rx_config['aprs_custom_comment'] = config.get('aprs', 'aprs_custom_comment')
auto_rx_config['aprs_position_report'] = config.getboolean('aprs','aprs_position_report')
auto_rx_config['station_beacon_enabled'] = config.getboolean('aprs','station_beacon_enabled')
auto_rx_config['station_beacon_rate'] = config.getint('aprs', 'station_beacon_rate')
auto_rx_config['station_beacon_comment'] = config.get('aprs', 'station_beacon_comment')
auto_rx_config['station_beacon_icon'] = config.get('aprs', 'station_beacon_icon')
if auto_rx_config['aprs_upload_rate'] < MINIMUM_APRS_UPDATE_RATE:
logging.warning("Config - APRS Update Rate clipped to minimum of %d seconds. Please be respectful of other users of APRS-IS." % MINIMUM_APRS_UPDATE_RATE)
auto_rx_config['aprs_upload_rate'] = MINIMUM_APRS_UPDATE_RATE
# OziPlotter Settings
auto_rx_config['ozi_enabled'] = config.getboolean('oziplotter', 'ozi_enabled')
auto_rx_config['ozi_update_rate'] = config.getint('oziplotter', 'ozi_update_rate')
auto_rx_config['ozi_port'] = config.getint('oziplotter', 'ozi_port')
auto_rx_config['payload_summary_enabled'] = config.getboolean('oziplotter', 'payload_summary_enabled')
auto_rx_config['payload_summary_port'] = config.getint('oziplotter', 'payload_summary_port')
# Advanced Settings
auto_rx_config['search_step'] = config.getfloat('advanced', 'search_step')
auto_rx_config['snr_threshold'] = config.getfloat('advanced', 'snr_threshold')
auto_rx_config['min_distance'] = config.getfloat('advanced', 'min_distance')
auto_rx_config['dwell_time'] = config.getint('advanced', 'dwell_time')
auto_rx_config['quantization'] = config.getint('advanced', 'quantization')
auto_rx_config['max_peaks'] = config.getint('advanced', 'max_peaks')
auto_rx_config['scan_dwell_time'] = config.getint('advanced', 'scan_dwell_time')
auto_rx_config['detect_dwell_time'] = config.getint('advanced', 'detect_dwell_time')
auto_rx_config['scan_delay'] = config.getint('advanced', 'scan_delay')
auto_rx_config['payload_id_valid'] = config.getint('advanced', 'payload_id_valid')
auto_rx_config['synchronous_upload'] = config.getboolean('advanced', 'synchronous_upload')
# Rotator Settings
auto_rx_config['rotator_enabled'] = config.getboolean('rotator','rotator_enabled')
auto_rx_config['rotator_update_rate'] = config.getint('rotator', 'update_rate')
auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname')
auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port')
auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled')
auto_rx_config['rotator_home_azimuth'] = config.getfloat('rotator', 'rotator_home_azimuth')
auto_rx_config['rotator_home_elevation'] = config.getfloat('rotator', 'rotator_home_elevation')
auto_rx_config['rotator_homing_delay'] = config.getint('rotator', 'rotator_homing_delay')
auto_rx_config['rotation_threshold'] = config.getfloat('rotator', 'rotation_threshold')
# Web interface settings.
auto_rx_config['web_host'] = config.get('web', 'web_host')
auto_rx_config['web_port'] = config.getint('web', 'web_port')
auto_rx_config['web_archive_age'] = config.getint('web', 'archive_age')
auto_rx_config['save_detection_audio'] = config.getboolean('debugging', 'save_detection_audio')
auto_rx_config['save_decode_audio'] = config.getboolean('debugging', 'save_decode_audio')
auto_rx_config['save_decode_iq'] = config.getboolean('debugging', 'save_decode_iq')
# NOTE 2019-09-21: The station code will now be fixed at the default to avoid multiple iMet callsign issues.
# auto_rx_config['station_code'] = config.get('location', 'station_code')
# if len(auto_rx_config['station_code']) > 5:
# auto_rx_config['station_code'] = auto_rx_config['station_code'][:5]
# logging.warning("Config - Clipped station code to 5 digits: %s" % auto_rx_config['station_code'])
auto_rx_config['temporary_block_time'] = config.getint('advanced', 'temporary_block_time')
# New demod tweaks - Added 2019-04-23
# Default to all experimental decoders on.
auto_rx_config['experimental_decoders'] = {
'RS41': True,
'RS92': True,
'DFM': True,
'M10': True,
'M20': True,
'IMET': False,
'LMS6': True,
'MK2LMS': False,
'MEISEI': False,
'UDP': False}
auto_rx_config['rs41_drift_tweak'] = config.getboolean('advanced', 'drift_tweak')
auto_rx_config['decoder_spacing_limit'] = config.getint('advanced', 'decoder_spacing_limit')
auto_rx_config['experimental_decoders']['RS41'] = config.getboolean('advanced', 'rs41_experimental')
auto_rx_config['experimental_decoders']['RS92'] = config.getboolean('advanced', 'rs92_experimental')
auto_rx_config['experimental_decoders']['M10'] = config.getboolean('advanced', 'm10_experimental')
auto_rx_config['experimental_decoders']['DFM'] = config.getboolean('advanced', 'dfm_experimental')
auto_rx_config['experimental_decoders']['LMS6'] = config.getboolean('advanced', 'lms6-400_experimental')
try:
auto_rx_config['web_control'] = config.getboolean('web', 'web_control')
auto_rx_config['ngp_tweak'] = config.getboolean('advanced', 'ngp_tweak')
auto_rx_config['gpsd_enabled'] = config.getboolean('location', 'gpsd_enabled')
auto_rx_config['gpsd_host'] = config.get('location', 'gpsd_host')
auto_rx_config['gpsd_port'] = config.getint('location', 'gpsd_port')
except:
logging.warning("Config - Did not find web control / ngp_tweak / gpsd options, using defaults (disabled)")
auto_rx_config['web_control'] = False
auto_rx_config['ngp_tweak'] = False
auto_rx_config['gpsd_enabled'] = False
try:
auto_rx_config['min_radius_km'] = config.getint('filtering', 'min_radius_km')
auto_rx_config['radius_temporary_block'] = config.getboolean('filtering', 'radius_temporary_block')
except:
logging.warning("Config - Did not find minimum radius filter setting, using default (0km).")
auto_rx_config['min_radius_km'] = 0
auto_rx_config['radius_temporary_block'] = False
try:
auto_rx_config['aprs_use_custom_object_id'] = config.getboolean('aprs','aprs_use_custom_object_id')
except:
logging.warning("Config - Did not find aprs_use_custom_object_id setting, using default (False)")
auto_rx_config['aprs_use_custom_object_id'] = False
try:
auto_rx_config['email_error_notifications'] = config.getboolean('email', 'email_error_notifications')
except:
logging.warning("Config - Did not find email_error_notifications setting, using default (False)")
auto_rx_config['email_error_notifications'] = False
# If we are being called as part of a unit test, just return the config now.
if no_sdr_test:
return auto_rx_config
# Now we attempt to read in the individual SDR parameters.
auto_rx_config['sdr_settings'] = {}
for _n in range(1,auto_rx_config['sdr_quantity']+1):
_section = "sdr_%d" % _n
try:
_device_idx = config.get(_section,'device_idx')
_ppm = round(config.getfloat(_section, 'ppm'))
_gain = config.getfloat(_section, 'gain')
_bias = config.getboolean(_section, 'bias')
if (auto_rx_config['sdr_quantity'] > 1) and (_device_idx == '0'):
logging.critical("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!")
return None
# See if the SDR exists.
_sdr_valid = rtlsdr_test(_device_idx)
if _sdr_valid:
auto_rx_config['sdr_settings'][_device_idx] = {'ppm':_ppm, 'gain':_gain, 'bias':_bias, 'in_use': False, 'task': None}
logging.info('Config - Tested SDR #%s OK' % _device_idx)
else:
logging.warning("Config - SDR #%s invalid." % _device_idx)
except Exception as e:
logging.error("Config - Error parsing SDR %d config - %s" % (_n,str(e)))
continue
# Sanity checks when using more than one SDR
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['aprs_object_id'] != "<id>"):
logging.critical("Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!")
return None
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['rotator_enabled']):
logging.critical("Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!")
return None
# TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and auto_rx_config['ozi_enabled']:
logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
return None
if len(auto_rx_config['sdr_settings'].keys()) == 0:
# We have no SDRs to use!!
logging.error("Config - No working SDRs! Cannot run...")
return None
else:
# Create a global copy of the configuration file at this point
global_config = copy.deepcopy(auto_rx_config)
# Excise some sensitive parameters from the global config.
global_config.pop('email_smtp_login')
global_config.pop('email_smtp_password')
global_config.pop('email_smtp_server')
return auto_rx_config
except:
traceback.print_exc()
logging.error("Could not parse config file.")
return None
if __name__ == '__main__':
''' Quick test script to attempt to read in a config file. '''
import sys, pprint
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
config = read_auto_rx_config(sys.argv[1])
pprint.pprint(global_config)
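Aside, not part of the original commit: the try/except-with-default pattern used repeatedly above, for options added after older station.cfg files were already deployed, can be factored into a small helper. A minimal sketch, assuming configparser; the section and option names are illustrative only.

import configparser
import logging

def get_with_default(getter, section, option, default):
    """ Read an option via a bound getter (config.get / getint / getboolean),
    falling back to a default if the section or option is missing. """
    try:
        return getter(section, option)
    except (configparser.NoSectionError, configparser.NoOptionError):
        logging.warning(
            "Config - Did not find %s/%s setting, using default (%s)"
            % (section, option, default)
        )
        return default

config = configparser.RawConfigParser()
config.read("station.cfg")
web_control = get_with_default(config.getboolean, "web", "web_control", False)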

Diff is too large Load Diff

View file

@ -33,9 +33,20 @@ class EmailNotification(object):
"""
# We require the following fields to be present in the input telemetry dict.
REQUIRED_FIELDS = [ 'id', 'lat', 'lon', 'alt', 'type', 'freq']
REQUIRED_FIELDS = ["id", "lat", "lon", "alt", "type", "freq"]
def __init__(self, smtp_server = 'localhost', smtp_port=25, smtp_authentication='None', smtp_login="None", smtp_password="None", mail_from = None, mail_to = None, mail_subject = None, station_position = None):
def __init__(
self,
smtp_server="localhost",
smtp_port=25,
smtp_authentication="None",
smtp_login="None",
smtp_password="None",
mail_from=None,
mail_to=None,
mail_subject=None,
station_position=None,
):
""" Init a new E-Mail Notification Thread """
self.smtp_server = smtp_server
self.smtp_port = smtp_port
@ -55,12 +66,11 @@ class EmailNotification(object):
# Start queue processing thread.
self.input_processing_running = True
self.input_thread = Thread(target = self.process_queue)
self.input_thread = Thread(target=self.process_queue)
self.input_thread.start()
self.log_info("Started E-Mail Notifier Thread")
def add(self, telemetry):
""" Add a telemetery dictionary to the input queue. """
# Check the telemetry dictionary contains the required fields.
@ -75,7 +85,6 @@ class EmailNotification(object):
else:
self.log_error("Processing not running, discarding.")
def process_queue(self):
""" Process packets from the input queue. """
while self.input_processing_running:
@ -92,93 +101,98 @@ class EmailNotification(object):
# Sleep while waiting for some new data.
time.sleep(0.5)
def process_telemetry(self, telemetry):
""" Process a new telemmetry dict, and send an e-mail if it is a new sonde. """
_id = telemetry['id']
_id = telemetry["id"]
if _id not in self.sondes:
try:
# This is a new sonde. Send the email.
msg = 'Sonde launch detected:\n'
msg += '\n'
msg = "Sonde launch detected:\n"
msg += "\n"
if 'encrypted' in telemetry:
if telemetry['encrypted'] == True:
if "encrypted" in telemetry:
if telemetry["encrypted"] == True:
msg += "ENCRYPTED RADIOSONDE DETECTED!\n"
msg += 'Callsign: %s\n' % _id
msg += 'Type: %s\n' % telemetry['type']
msg += 'Frequency: %s\n' % telemetry['freq']
msg += 'Position: %.5f,%.5f\n' % (telemetry['lat'], telemetry['lon'])
msg += 'Altitude: %d m\n' % round(telemetry['alt'])
msg += "Callsign: %s\n" % _id
msg += "Type: %s\n" % telemetry["type"]
msg += "Frequency: %s\n" % telemetry["freq"]
msg += "Position: %.5f,%.5f\n" % (telemetry["lat"], telemetry["lon"])
msg += "Altitude: %d m\n" % round(telemetry["alt"])
if self.station_position != None:
_relative_position = position_info(self.station_position, (telemetry['lat'], telemetry['lon'], telemetry['alt']))
msg += 'Range: %.1f km\n' % (_relative_position['straight_distance']/1000.0)
msg += 'Bearing: %d degrees True\n' % int(_relative_position['bearing'])
_relative_position = position_info(
self.station_position,
(telemetry["lat"], telemetry["lon"], telemetry["alt"]),
)
msg += "Range: %.1f km\n" % (
_relative_position["straight_distance"] / 1000.0
)
msg += "Bearing: %d degrees True\n" % int(
_relative_position["bearing"]
)
msg += '\n'
#msg += 'https://tracker.habhub.org/#!qm=All&q=RS_%s\n' % _id
msg += 'https://sondehub.org/%s\n' % _id
msg += "\n"
# msg += 'https://tracker.habhub.org/#!qm=All&q=RS_%s\n' % _id
msg += "https://sondehub.org/%s\n" % _id
# Construct subject
_subject = self.mail_subject
_subject = _subject.replace('<id>', telemetry['id'])
_subject = _subject.replace('<type>', telemetry['type'])
_subject = _subject.replace('<freq>', telemetry['freq'])
_subject = _subject.replace("<id>", telemetry["id"])
_subject = _subject.replace("<type>", telemetry["type"])
_subject = _subject.replace("<freq>", telemetry["freq"])
if 'encrypted' in telemetry:
if telemetry['encrypted'] == True:
if "encrypted" in telemetry:
if telemetry["encrypted"] == True:
_subject += " - ENCRYPTED SONDE"
logging.debug("Email - Subject: %s" % _subject)
logging.debug("Email - Message: %s" % msg)
# Connect to the SMTP server.
if self.smtp_authentication == 'SSL':
if self.smtp_authentication == "SSL":
s = smtplib.SMTP_SSL(self.smtp_server, self.smtp_port)
else:
s = smtplib.SMTP(self.smtp_server, self.smtp_port)
if self.smtp_authentication == 'TLS':
if self.smtp_authentication == "TLS":
s.starttls()
if self.smtp_login != "None":
s.login(self.smtp_login, self.smtp_password)
s.login(self.smtp_login, self.smtp_password)
# Send messages to all recipients.
for _destination in self.mail_to.split(';'):
mime_msg = MIMEText(msg, 'plain', 'UTF-8')
for _destination in self.mail_to.split(";"):
mime_msg = MIMEText(msg, "plain", "UTF-8")
mime_msg['From'] = self.mail_from
mime_msg['To'] = _destination
mime_msg["From"] = self.mail_from
mime_msg["To"] = _destination
mime_msg["Date"] = formatdate()
mime_msg['Subject'] = _subject
mime_msg["Subject"] = _subject
s.sendmail(mime_msg['From'], _destination, mime_msg.as_string())
s.sendmail(mime_msg["From"], _destination, mime_msg.as_string())
time.sleep(2)
s.quit()
self.log_info("E-mail sent.")
except Exception as e:
self.log_error("Error sending E-mail - %s" % str(e))
self.sondes[_id] = { 'last_time': time.time() }
self.sondes[_id] = {"last_time": time.time()}
def send_notification_email(self, subject="radiosonde_auto_rx Station Notification", message="Foobar"):
def send_notification_email(
self, subject="radiosonde_auto_rx Station Notification", message="Foobar"
):
""" Generic e-mail notification function, for sending error messages. """
try:
msg = 'radiosonde_auto_rx Email Notification Message:\n'
msg += 'Timestamp: %s\n' % datetime.datetime.now().isoformat()
msg = "radiosonde_auto_rx Email Notification Message:\n"
msg += "Timestamp: %s\n" % datetime.datetime.now().isoformat()
msg += message
msg += '\n'
msg += "\n"
# Construct subject
_subject = subject
@ -188,41 +202,38 @@ class EmailNotification(object):
# Connect to the SMTP server.
if self.smtp_authentication == 'SSL':
if self.smtp_authentication == "SSL":
s = smtplib.SMTP_SSL(self.smtp_server, self.smtp_port)
else:
s = smtplib.SMTP(self.smtp_server, self.smtp_port)
if self.smtp_authentication == 'TLS':
if self.smtp_authentication == "TLS":
s.starttls()
if self.smtp_login != "None":
s.login(self.smtp_login, self.smtp_password)
s.login(self.smtp_login, self.smtp_password)
# Send messages to all recipients.
for _destination in self.mail_to.split(';'):
mime_msg = MIMEText(msg, 'plain', 'UTF-8')
for _destination in self.mail_to.split(";"):
mime_msg = MIMEText(msg, "plain", "UTF-8")
mime_msg['From'] = self.mail_from
mime_msg['To'] = _destination
mime_msg["From"] = self.mail_from
mime_msg["To"] = _destination
mime_msg["Date"] = formatdate()
mime_msg['Subject'] = _subject
mime_msg["Subject"] = _subject
s.sendmail(mime_msg['From'], _destination, mime_msg.as_string())
s.sendmail(mime_msg["From"], _destination, mime_msg.as_string())
time.sleep(2)
s.quit()
self.log_info("E-mail notification sent.")
except Exception as e:
self.log_error("Error sending E-mail notification - %s" % str(e))
pass
def close(self):
""" Close input processing thread. """
self.log_debug("Waiting for processing thread to close...")
@ -231,7 +242,6 @@ class EmailNotification(object):
if self.input_thread is not None:
self.input_thread.join()
def running(self):
""" Check if the logging thread is running.
@ -240,7 +250,6 @@ class EmailNotification(object):
"""
return self.input_processing_running
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -248,7 +257,6 @@ class EmailNotification(object):
"""
logging.debug("E-Mail - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -256,7 +264,6 @@ class EmailNotification(object):
"""
logging.info("E-Mail - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
@ -269,21 +276,23 @@ if __name__ == "__main__":
# Test Script - Send an example email using the settings in station.cfg
import sys
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
# Read in the station config, which contains the email settings.
config = read_auto_rx_config('station.cfg', no_sdr_test=True)
config = read_auto_rx_config("station.cfg", no_sdr_test=True)
# Start up an email notification object.
_email_notification = EmailNotification(
smtp_server = config['email_smtp_server'],
smtp_port = config['email_smtp_port'],
smtp_authentication = config['email_smtp_authentication'],
smtp_login = config['email_smtp_login'],
smtp_password = config['email_smtp_password'],
mail_from = config['email_from'],
mail_to = config['email_to'],
mail_subject = config['email_subject']
smtp_server=config["email_smtp_server"],
smtp_port=config["email_smtp_port"],
smtp_authentication=config["email_smtp_authentication"],
smtp_login=config["email_smtp_login"],
smtp_password=config["email_smtp_password"],
mail_from=config["email_from"],
mail_to=config["email_to"],
mail_subject=config["email_subject"],
)
# Wait a second..
@ -294,7 +303,23 @@ if __name__ == "__main__":
time.sleep(1)
# Add in a packet of telemetry, which will cause the email notifier to send an email.
_email_notification.add({'id':'N1234557', 'frame':10, 'lat':-10.0, 'lon':10.0, 'alt':10000, 'temp':1.0, 'type':'RS41', 'freq':'401.520 MHz', 'freq_float':401.52, 'heading':0.0, 'vel_h':5.1, 'vel_v':-5.0, 'datetime_dt':datetime.datetime.utcnow()})
_email_notification.add(
{
"id": "N1234557",
"frame": 10,
"lat": -10.0,
"lon": 10.0,
"alt": 10000,
"temp": 1.0,
"type": "RS41",
"freq": "401.520 MHz",
"freq_float": 401.52,
"heading": 0.0,
"vel_h": 5.1,
"vel_v": -5.0,
"datetime_dt": datetime.datetime.utcnow(),
}
)
# Wait a little bit before shutting down.
time.sleep(5)
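For reference, the None/TLS/SSL branching used in both e-mail paths above reduces to the following standalone sketch. This is illustrative only; the host, port and addresses are invented placeholders.

import smtplib
from email.mime.text import MIMEText

def send_mail(host, port, auth, login, password, mail_from, mail_to, subject, body):
    # SSL opens an encrypted socket immediately; TLS upgrades a plain session.
    if auth == "SSL":
        s = smtplib.SMTP_SSL(host, port)
    else:
        s = smtplib.SMTP(host, port)
        if auth == "TLS":
            s.starttls()
    if login != "None":
        s.login(login, password)
    msg = MIMEText(body, "plain", "UTF-8")
    msg["From"] = mail_from
    msg["To"] = mail_to
    msg["Subject"] = subject
    s.sendmail(mail_from, [mail_to], msg.as_string())
    s.quit()

# Hypothetical usage:
# send_mail("smtp.example.com", 465, "SSL", "user", "secret",
#           "rx@example.com", "me@example.com", "Sonde!", "Launch detected.")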

View file

@ -20,15 +20,9 @@ class FSKDemodStats(object):
The test script below will emulate real-time input based on a file.
"""
FSK_STATS_FIELDS = ["EbNodB", "ppm", "f1_est", "f2_est", "samp_fft"]
FSK_STATS_FIELDS = ['EbNodB', 'ppm', 'f1_est', 'f2_est', 'samp_fft']
def __init__(self,
averaging_time = 5.0,
peak_hold = False,
decoder_id = ""
):
def __init__(self, averaging_time=5.0, peak_hold=False, decoder_id=""):
"""
Required Fields:
@ -47,15 +41,12 @@ class FSKDemodStats(object):
self.in_snr = np.array([])
self.in_ppm = np.array([])
# Output State variables.
self.snr = -999.0
self.fest = [0.0,0.0]
self.fest = [0.0, 0.0]
self.fft = []
self.ppm = 0.0
def update(self, data):
"""
Update the statistics parser with a new set of output from fsk_demod.
@ -67,20 +58,19 @@ class FSKDemodStats(object):
# Check input type
if type(data) == bytes:
data = data.decode('ascii')
data = data.decode("ascii")
if type(data) == dict:
_data = data
else:
# Attempt to parse string.
try:
_data = json.loads(data)
except Exception as e:
# Be quiet for now...
#self.log_error("FSK Demod Stats - %s" % str(e))
# self.log_error("FSK Demod Stats - %s" % str(e))
return
# Check for required fields in incoming dictionary.
for _field in self.FSK_STATS_FIELDS:
@ -90,18 +80,17 @@ class FSKDemodStats(object):
# Now we can process the data.
_time = time.time()
self.fft = _data['samp_fft']
self.fest[0] = _data['f1_est']
self.fest[1] = _data['f2_est']
self.fft = _data["samp_fft"]
self.fest[0] = _data["f1_est"]
self.fest[1] = _data["f2_est"]
# Time-series data
self.in_times = np.append(self.in_times, _time)
self.in_snr = np.append(self.in_snr, _data['EbNodB'])
self.in_ppm = np.append(self.in_ppm, _data['ppm'])
self.in_snr = np.append(self.in_snr, _data["EbNodB"])
self.in_ppm = np.append(self.in_ppm, _data["ppm"])
# Calculate SNR / PPM
_time_range = self.in_times>(_time-self.averaging_time)
_time_range = self.in_times > (_time - self.averaging_time)
# Clip arrays to just the values we want
self.in_ppm = self.in_ppm[_time_range]
self.in_snr = self.in_snr[_time_range]
@ -115,7 +104,6 @@ class FSKDemodStats(object):
else:
self.snr = np.mean(self.in_snr)
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -123,7 +111,6 @@ class FSKDemodStats(object):
"""
logging.debug("FSK Demod Stats #%s - %s" % (str(self.decoder_id), line))
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -131,7 +118,6 @@ class FSKDemodStats(object):
"""
logging.info("FSK Demod Stats #%s - %s" % (str(self.decoder_id), line))
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
@ -140,12 +126,12 @@ class FSKDemodStats(object):
logging.error("FSK Demod Stats #%s - %s" % (str(self.decoder_id), line))
if __name__ == "__main__":
import sys
_filename = sys.argv[1]
_f = open(_filename,'r')
_f = open(_filename, "r")
stats = FSKDemodStats(averaging_time=2.0, peak_hold=True)
@ -158,15 +144,15 @@ if __name__ == "__main__":
_line = json.loads(_line)
except:
continue
stats.update(_line)
time.sleep(1/rate)
time.sleep(1 / rate)
if count%updaterate == 0:
print("%d - SNR: %.1f dB, FEst: %s, ppm: %.1f" % (count, stats.snr, stats.fest, stats.ppm))
if count % updaterate == 0:
print(
"%d - SNR: %.1f dB, FEst: %s, ppm: %.1f"
% (count, stats.snr, stats.fest, stats.ppm)
)
count += 1
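The windowed averaging performed in FSKDemodStats.update comes down to a boolean mask over parallel numpy arrays of timestamps and values. A minimal standalone sketch of just that mechanism, with invented sample values:

import time
import numpy as np

AVERAGING_TIME = 5.0  # seconds of history to retain

times = np.array([])
snrs = np.array([])

def add_sample(snr):
    global times, snrs
    _now = time.time()
    times = np.append(times, _now)
    snrs = np.append(snrs, snr)
    # Keep only the samples that fall within the averaging window.
    _mask = times > (_now - AVERAGING_TIME)
    times = times[_mask]
    snrs = snrs[_mask]
    return np.mean(snrs)

for _v in [10.0, 12.5, 11.0]:
    print("Mean SNR: %.1f dB" % add_sample(_v))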

View file

@ -13,36 +13,44 @@ from .utils import position_info
def getDensity(altitude):
'''
"""
Calculate the atmospheric density for a given altitude in metres.
This is a direct port of the oziplotter Atmosphere class
'''
"""
#Constants
# Constants
airMolWeight = 28.9644 # Molecular weight of air
densitySL = 1.225 # Density at sea level [kg/m3]
pressureSL = 101325 # Pressure at sea level [Pa]
densitySL = 1.225 # Density at sea level [kg/m3]
pressureSL = 101325 # Pressure at sea level [Pa]
temperatureSL = 288.15 # Temperature at sea level [deg K]
gamma = 1.4
gravity = 9.80665 # Acceleration of gravity [m/s2]
gravity = 9.80665 # Acceleration of gravity [m/s2]
tempGrad = -0.0065 # Temperature gradient [deg K/m]
RGas = 8.31432 # Gas constant [kg/Mol/K]
R = 287.053
deltaTemperature = 0.0;
R = 287.053
deltaTemperature = 0.0
# Lookup Tables
altitudes = [0, 11000, 20000, 32000, 47000, 51000, 71000, 84852]
pressureRels = [1, 2.23361105092158e-1, 5.403295010784876e-2, 8.566678359291667e-3, 1.0945601337771144e-3, 6.606353132858367e-4, 3.904683373343926e-5, 3.6850095235747942e-6]
pressureRels = [
1,
2.23361105092158e-1,
5.403295010784876e-2,
8.566678359291667e-3,
1.0945601337771144e-3,
6.606353132858367e-4,
3.904683373343926e-5,
3.6850095235747942e-6,
]
temperatures = [288.15, 216.65, 216.65, 228.65, 270.65, 270.65, 214.65, 186.946]
tempGrads = [-6.5, 0, 1, 2.8, 0, -2.8, -2, 0]
gMR = gravity * airMolWeight / RGas;
gMR = gravity * airMolWeight / RGas
# Pick a region to work in
i = 0
if(altitude > 0):
while (altitude > altitudes[i+1]):
if altitude > 0:
while altitude > altitudes[i + 1]:
i = i + 1
# Lookup based on region
baseTemp = temperatures[i]
@ -52,11 +60,14 @@ def getDensity(altitude):
temperature = baseTemp + tempGrad * deltaAltitude
# Calculate relative pressure
if(math.fabs(tempGrad) < 1e-10):
pressureRel = pressureRelBase * math.exp(-1 *gMR * deltaAltitude / 1000.0 / baseTemp)
if math.fabs(tempGrad) < 1e-10:
pressureRel = pressureRelBase * math.exp(
-1 * gMR * deltaAltitude / 1000.0 / baseTemp
)
else:
pressureRel = pressureRelBase * math.pow(baseTemp / temperature, gMR / tempGrad / 1000.0)
pressureRel = pressureRelBase * math.pow(
baseTemp / temperature, gMR / tempGrad / 1000.0
)
# Add temperature offset
temperature = temperature + deltaTemperature
@ -70,15 +81,16 @@ def getDensity(altitude):
def seaLevelDescentRate(descent_rate, altitude):
''' Calculate the descent rate at sea level, for a given descent rate at altitude '''
""" Calculate the descent rate at sea level, for a given descent rate at altitude """
rho = getDensity(altitude)
return math.sqrt((rho / 1.22) * math.pow(descent_rate, 2))
def time_to_landing(current_altitude, current_descent_rate=-5.0, ground_asl=0.0, step_size=1):
''' Calculate an estimated time to landing (in seconds) of a payload, based on its current altitude and descent rate '''
def time_to_landing(
current_altitude, current_descent_rate=-5.0, ground_asl=0.0, step_size=1
):
""" Calculate an estimated time to landing (in seconds) of a payload, based on its current altitude and descent rate """
# A few checks on the input data.
if current_descent_rate > 0.0:
@ -91,20 +103,19 @@ def time_to_landing(current_altitude, current_descent_rate=-5.0, ground_asl=0.0,
# Calculate the sea level descent rate.
_desc_rate = math.fabs(seaLevelDescentRate(current_descent_rate, current_altitude))
_drag_coeff = _desc_rate*1.1045 # Magic multiplier from predict.php
_drag_coeff = _desc_rate * 1.1045 # Magic multiplier from predict.php
_alt = current_altitude
_start_time = 0
# Now step through the flight in <step_size> second steps.
# Once the altitude is below our ground level, stop, and return the elapsed time.
while _alt >= ground_asl:
_alt += step_size * -1*(_drag_coeff/math.sqrt(getDensity(_alt)))
_alt += step_size * -1 * (_drag_coeff / math.sqrt(getDensity(_alt)))
_start_time += step_size
return _start_time
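As a worked example of the scaling above: getDensity(5000) is roughly 0.74 kg/m3, so a payload descending at 10 m/s at 5000 m should reach approximately sqrt(0.74 / 1.22) * 10 ≈ 7.8 m/s at sea level. A short usage sketch, assuming the functions defined above are in scope:

# Invented example values, for illustration only.
_alt = 5000.0     # metres
_descent = -10.0  # m/s at altitude
print("Density at %.0f m: %.3f kg/m3" % (_alt, getDensity(_alt)))
print("Sea-level descent rate: %.1f m/s" % seaLevelDescentRate(_descent, _alt))
print("Estimated time to landing: %d s" % time_to_landing(_alt, _descent))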
class GenericTrack(object):
"""
A Generic 'track' object, which stores track positions for a payload or chase car.
@ -114,10 +125,8 @@ class GenericTrack(object):
The track history can be exported to a LineString using the to_line_string method.
"""
def __init__(self,
ascent_averaging = 6,
landing_rate = 5.0):
''' Create a GenericTrack Object. '''
def __init__(self, ascent_averaging=6, landing_rate=5.0):
""" Create a GenericTrack Object. """
# Averaging rate.
self.ASCENT_AVERAGING = ascent_averaging
@ -132,20 +141,19 @@ class GenericTrack(object):
# Data is stored as a list-of-lists, with elements of [datetime, lat, lon, alt, comment]
self.track_history = []
def add_telemetry(self,data_dict):
'''
def add_telemetry(self, data_dict):
"""
Accept telemetry data as a dictionary with fields
datetime, lat, lon, alt, comment
'''
"""
try:
_datetime = data_dict['time']
_lat = data_dict['lat']
_lon = data_dict['lon']
_alt = data_dict['alt']
if 'comment' in data_dict.keys():
_comment = data_dict['comment']
_datetime = data_dict["time"]
_lat = data_dict["lat"]
_lon = data_dict["lon"]
_alt = data_dict["alt"]
if "comment" in data_dict.keys():
_comment = data_dict["comment"]
else:
_comment = ""
@ -158,83 +166,93 @@ class GenericTrack(object):
# We can safely skip over these.
pass
except Exception as e:
logging.debug("Web - Error adding new telemetry to GenericTrack %s" % str(e))
logging.debug(
"Web - Error adding new telemetry to GenericTrack %s" % str(e)
)
def get_latest_state(self):
''' Get the latest position of the payload '''
""" Get the latest position of the payload """
if len(self.track_history) == 0:
return None
else:
_latest_position = self.track_history[-1]
_state = {
'time' : _latest_position[0],
'lat' : _latest_position[1],
'lon' : _latest_position[2],
'alt' : _latest_position[3],
'ascent_rate': self.ascent_rate,
'is_descending': self.is_descending,
'landing_rate': self.landing_rate,
'heading': self.heading,
'speed': self.speed
"time": _latest_position[0],
"lat": _latest_position[1],
"lon": _latest_position[2],
"alt": _latest_position[3],
"ascent_rate": self.ascent_rate,
"is_descending": self.is_descending,
"landing_rate": self.landing_rate,
"heading": self.heading,
"speed": self.speed,
}
return _state
def calculate_ascent_rate(self):
''' Calculate the ascent/descent rate of the payload based on the available data '''
""" Calculate the ascent/descent rate of the payload based on the available data """
if len(self.track_history) <= 1:
return 0.0
elif len(self.track_history) == 2:
# Basic ascent rate case - only 2 samples.
_time_delta = (self.track_history[-1][0] - self.track_history[-2][0]).total_seconds()
_time_delta = (
self.track_history[-1][0] - self.track_history[-2][0]
).total_seconds()
_altitude_delta = self.track_history[-1][3] - self.track_history[-2][3]
return _altitude_delta/_time_delta
return _altitude_delta / _time_delta
else:
_num_samples = min(len(self.track_history), self.ASCENT_AVERAGING)
_asc_rates = []
for _i in range(-1*(_num_samples-1), 0):
_time_delta = (self.track_history[_i][0] - self.track_history[_i-1][0]).total_seconds()
_altitude_delta = self.track_history[_i][3] - self.track_history[_i-1][3]
_asc_rates.append(_altitude_delta/_time_delta)
for _i in range(-1 * (_num_samples - 1), 0):
_time_delta = (
self.track_history[_i][0] - self.track_history[_i - 1][0]
).total_seconds()
_altitude_delta = (
self.track_history[_i][3] - self.track_history[_i - 1][3]
)
_asc_rates.append(_altitude_delta / _time_delta)
return np.mean(_asc_rates)
def calculate_heading(self):
''' Calculate the heading of the payload '''
""" Calculate the heading of the payload """
if len(self.track_history) <= 1:
return 0.0
else:
_pos_1 = self.track_history[-2]
_pos_2 = self.track_history[-1]
_pos_info = position_info((_pos_1[1],_pos_1[2],_pos_1[3]), (_pos_2[1],_pos_2[2],_pos_2[3]))
_pos_info = position_info(
(_pos_1[1], _pos_1[2], _pos_1[3]), (_pos_2[1], _pos_2[2], _pos_2[3])
)
return _pos_info['bearing']
return _pos_info["bearing"]
def calculate_speed(self):
""" Calculate Payload Speed in metres per second """
if len(self.track_history)<=1:
if len(self.track_history) <= 1:
return 0.0
else:
_time_delta = (self.track_history[-1][0] - self.track_history[-2][0]).total_seconds()
_time_delta = (
self.track_history[-1][0] - self.track_history[-2][0]
).total_seconds()
_pos_1 = self.track_history[-2]
_pos_2 = self.track_history[-1]
_pos_info = position_info((_pos_1[1],_pos_1[2],_pos_1[3]), (_pos_2[1],_pos_2[2],_pos_2[3]))
_pos_info = position_info(
(_pos_1[1], _pos_1[2], _pos_1[3]), (_pos_2[1], _pos_2[2], _pos_2[3])
)
_speed = _pos_info['great_circle_distance']/_time_delta
_speed = _pos_info["great_circle_distance"] / _time_delta
return _speed
def update_states(self):
''' Update internal states based on the current data '''
""" Update internal states based on the current data """
self.ascent_rate = self.calculate_ascent_rate()
self.heading = self.calculate_heading()
self.speed = self.calculate_speed()
@ -244,9 +262,8 @@ class GenericTrack(object):
_current_alt = self.track_history[-1][3]
self.landing_rate = seaLevelDescentRate(self.ascent_rate, _current_alt)
def to_polyline(self):
''' Generate and return a Leaflet PolyLine compatible array '''
""" Generate and return a Leaflet PolyLine compatible array """
# Copy array into a numpy representation for easier slicing.
if len(self.track_history) == 0:
return []
@ -257,6 +274,8 @@ class GenericTrack(object):
else:
_track_data_np = np.array(self.track_history)
# Produce new array
_track_points = np.column_stack((_track_data_np[:,1], _track_data_np[:,2], _track_data_np[:,3]))
_track_points = np.column_stack(
(_track_data_np[:, 1], _track_data_np[:, 2], _track_data_np[:, 3])
)
return _track_points.tolist()
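A brief usage sketch of GenericTrack, with invented telemetry values; each add_telemetry call appends a [datetime, lat, lon, alt, comment] row and refreshes the derived states:

import datetime

# Assumes GenericTrack as defined above is in scope.
_track = GenericTrack()
_t0 = datetime.datetime.utcnow()
for _i in range(5):
    _track.add_telemetry(
        {
            "time": _t0 + datetime.timedelta(seconds=_i),
            "lat": -34.0 + _i * 1e-4,
            "lon": 138.0,
            "alt": 1000.0 + _i * 5.0,  # climbing at ~5 m/s
        }
    )
print(_track.get_latest_state())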

View file

@ -10,68 +10,76 @@ import datetime
import logging
import os
def get_ephemeris(destination="ephemeris.dat"):
''' Download the latest GPS ephemeris file from the CDDIS FTP server '''
try:
logging.debug("GPS Grabber - Connecting to GSFC FTP Server...")
ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10)
ftp.login("anonymous","anonymous")
ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y"))
file_list= ftp.nlst()
""" Download the latest GPS ephemeris file from the CDDIS's FTP server """
try:
logging.debug("GPS Grabber - Connecting to GSFC FTP Server...")
ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10)
ftp.login("anonymous", "anonymous")
ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y"))
file_list = ftp.nlst()
# We expect the latest files to be the last in the list.
download_file = None
file_suffix = datetime.datetime.utcnow().strftime("%yn.Z")
# We expect the latest files to be the last in the list.
download_file = None
file_suffix = datetime.datetime.utcnow().strftime("%yn.Z")
if file_suffix in file_list[-1]:
download_file = file_list[-1]
elif file_suffix in file_list[-2]:
download_file = file_list[-2]
else:
logging.error("GPS Grabber - Could not find appropriate ephemeris file.")
return None
if file_suffix in file_list[-1]:
download_file = file_list[-1]
elif file_suffix in file_list[-2]:
download_file = file_list[-2]
else:
logging.error("GPS Grabber - Could not find appropriate ephemeris file.")
return None
logging.debug("GPS Grabber - Downloading ephemeris data file: %s" % download_file)
logging.debug(
"GPS Grabber - Downloading ephemeris data file: %s" % download_file
)
# Download file.
f_eph = open(destination+".Z",'wb')
ftp.retrbinary("RETR %s" % download_file, f_eph.write)
f_eph.close()
ftp.close()
# Download file.
f_eph = open(destination + ".Z", "wb")
ftp.retrbinary("RETR %s" % download_file, f_eph.write)
f_eph.close()
ftp.close()
# Unzip file.
os.system("gunzip -q -f ./%s" % (destination+".Z"))
# Unzip file.
os.system("gunzip -q -f ./%s" % (destination + ".Z"))
logging.info("GPS Grabber - Ephemeris downloaded to %s successfuly!" % destination)
logging.info(
"GPS Grabber - Ephemeris downloaded to %s successfuly!" % destination
)
return destination
except Exception as e:
logging.error("GPS Grabber - Could not download ephemeris file. - %s" % str(e))
return None
return destination
except Exception as e:
logging.error("GPS Grabber - Could not download ephemeris file. - %s" % str(e))
return None
def get_almanac(destination="almanac.txt", timeout=20):
''' Download the latest GPS almanac file from the US Coast Guard website. '''
try:
_r = requests.get("https://www.navcen.uscg.gov/?pageName=currentAlmanac&format=sem", timeout=timeout)
data = _r.text
if "CURRENT.ALM" in data:
f = open(destination,'w')
f.write(data)
f.close()
logging.info("GPS Grabber - Almanac downloaded to %s successfuly!" % destination)
return destination
else:
logging.error("GPS Grabber - Downloaded file is not a GPS almanac.")
return None
except Exception as e:
logging.error("GPS Grabber - Failed to download almanac data - " % str(e))
return None
""" Download the latest GPS almanac file from the US Coast Guard website. """
try:
_r = requests.get(
"https://www.navcen.uscg.gov/?pageName=currentAlmanac&format=sem",
timeout=timeout,
)
data = _r.text
if "CURRENT.ALM" in data:
f = open(destination, "w")
f.write(data)
f.close()
logging.info(
"GPS Grabber - Almanac downloaded to %s successfuly!" % destination
)
return destination
else:
logging.error("GPS Grabber - Downloaded file is not a GPS almanac.")
return None
except Exception as e:
logging.error("GPS Grabber - Failed to download almanac data - " % str(e))
return None
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
get_almanac()
get_ephemeris()
logging.basicConfig(level=logging.DEBUG)
get_almanac()
get_ephemeris()
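Both helpers return the destination path on success and None on failure, so callers can test the result directly. A usage sketch, assuming get_almanac / get_ephemeris as defined above are in scope (file names as per the defaults):

import logging

logging.basicConfig(level=logging.DEBUG)

if get_almanac(destination="almanac.txt") is None:
    logging.error("Almanac download failed.")

if get_ephemeris(destination="ephemeris.dat") is None:
    logging.error("Ephemeris download failed.")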

View file

@ -38,9 +38,9 @@ import time
from threading import Thread
GPSD_HOST = '127.0.0.1' # gpsd
GPSD_HOST = "127.0.0.1" # gpsd
GPSD_PORT = 2947 # defaults
GPSD_PROTOCOL = 'json' # "
GPSD_PROTOCOL = "json" # "
class GPSDSocket(object):
@ -79,14 +79,20 @@ class GPSDSocket(object):
# N.B.: 'timing' requires special attention, as it is undocumented and lives with dragons.
command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
if gpsd_protocol == 'rare': # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
if (
gpsd_protocol == "rare"
): # 1 for a channel, gpsd reports the unprocessed NMEA or AIVDM data stream
command = command.replace('"rare":true', '"raw":1')
if gpsd_protocol == 'raw': # 2 channel that processes binary data, received data verbatim without hex-dumping.
if (
gpsd_protocol == "raw"
): # 2 channel that processes binary data, received data verbatim without hex-dumping.
command = command.replace('"raw":true', '"raw",2')
if not enable:
command = command.replace('true', 'false') # sets -all- command values false.
command = command.replace(
"true", "false"
) # sets -all- command values false.
if devicepath:
command = command.replace('}', ',"device":"') + devicepath + '"}'
command = command.replace("}", ',"device":"') + devicepath + '"}'
return self.send(command)
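For clarity, the string substitutions above produce ?WATCH commands like those shown in the sketch below. This is a standalone re-statement of the same logic, not new behaviour:

def build_watch(enable=True, gpsd_protocol="json", devicepath=None):
    # Base command, then patch it for the special cases gpsd expects.
    command = '?WATCH={{"enable":true,"{0}":true}}'.format(gpsd_protocol)
    if gpsd_protocol == "rare":
        command = command.replace('"rare":true', '"raw":1')
    if gpsd_protocol == "raw":
        command = command.replace('"raw":true', '"raw":2')
    if not enable:
        command = command.replace("true", "false")
    if devicepath:
        command = command.replace("}", ',"device":"') + devicepath + '"}'
    return command

print(build_watch())                     # ?WATCH={"enable":true,"json":true}
print(build_watch(gpsd_protocol="raw"))  # ?WATCH={"enable":true,"raw":2}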
@ -98,7 +104,7 @@ class GPSDSocket(object):
# The POLL command requests data from the last-seen fixes on all active GPS devices.
# Devices must previously have been activated by ?WATCH to be pollable.
try:
self.streamSock.send(bytes(command, encoding='utf-8'))
self.streamSock.send(bytes(command, encoding="utf-8"))
except TypeError:
self.streamSock.send(command) # 2.7 chokes on 'bytes' and 'encoding='
except (OSError, IOError) as error: # MOE, LEAVE THIS ALONE!...for now.
@ -117,15 +123,22 @@ class GPSDSocket(object):
a poll and never blocks.
"""
try:
waitin, _waitout, _waiterror = select.select((self.streamSock,), (), (), timeout)
if not waitin: return None
waitin, _waitout, _waiterror = select.select(
(self.streamSock,), (), (), timeout
)
if not waitin:
return None
else:
gpsd_response = self.streamSock.makefile() # '.makefile(buffering=4096)' In strictly Python3
gpsd_response = (
self.streamSock.makefile()
) # '.makefile(buffering=4096)' In strictly Python3
self.response = gpsd_response.readline()
return self.response
except StopIteration as error:
logging.error("GPSD - The readline exception in GPSDSocket.next is %s" % str(error))
logging.error(
"GPSD - The readline exception in GPSDSocket.next is %s" % str(error)
)
__next__ = next # Workaround for changes in iterating between Python 2.7 and 3
@ -141,29 +154,103 @@ class DataStream(object):
"""Retrieve JSON Object(s) from GPSDSocket and unpack it into respective
gpsd 'class' dictionaries, TPV, SKY, etc. yielding hours of fun and entertainment.
"""
packages = {
'VERSION': {'release', 'proto_major', 'proto_minor', 'remote', 'rev'},
'TPV': {'alt', 'climb', 'device', 'epc', 'epd', 'eps', 'ept', 'epv', 'epx', 'epy', 'lat', 'lon', 'mode', 'speed', 'tag', 'time', 'track'},
'SKY': {'satellites', 'gdop', 'hdop', 'pdop', 'tdop', 'vdop', 'xdop', 'ydop'},
"VERSION": {"release", "proto_major", "proto_minor", "remote", "rev"},
"TPV": {
"alt",
"climb",
"device",
"epc",
"epd",
"eps",
"ept",
"epv",
"epx",
"epy",
"lat",
"lon",
"mode",
"speed",
"tag",
"time",
"track",
},
"SKY": {"satellites", "gdop", "hdop", "pdop", "tdop", "vdop", "xdop", "ydop"},
# Subset of SKY: 'satellites': {'PRN', 'ss', 'el', 'az', 'used'} # is always present.
'GST': {'alt', 'device', 'lat', 'lon', 'major', 'minor', 'orient', 'rms', 'time'},
'ATT': {'acc_len', 'acc_x', 'acc_y', 'acc_z', 'depth', 'device', 'dip', 'gyro_x', 'gyro_y', 'heading', 'mag_len', 'mag_st', 'mag_x',
'mag_y', 'mag_z', 'pitch', 'pitch_st', 'roll', 'roll_st', 'temperature', 'time', 'yaw', 'yaw_st'},
"GST": {
"alt",
"device",
"lat",
"lon",
"major",
"minor",
"orient",
"rms",
"time",
},
"ATT": {
"acc_len",
"acc_x",
"acc_y",
"acc_z",
"depth",
"device",
"dip",
"gyro_x",
"gyro_y",
"heading",
"mag_len",
"mag_st",
"mag_x",
"mag_y",
"mag_z",
"pitch",
"pitch_st",
"roll",
"roll_st",
"temperature",
"time",
"yaw",
"yaw_st",
},
# 'POLL': {'active', 'tpv', 'sky', 'time'},
'PPS': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec', 'precision'},
'TOFF': {'device', 'clock_sec', 'clock_nsec', 'real_sec', 'real_nsec'},
'DEVICES': {'devices', 'remote'},
'DEVICE': {'activated', 'bps', 'cycle', 'mincycle', 'driver', 'flags', 'native', 'parity', 'path', 'stopbits', 'subtype'},
"PPS": {
"device",
"clock_sec",
"clock_nsec",
"real_sec",
"real_nsec",
"precision",
},
"TOFF": {"device", "clock_sec", "clock_nsec", "real_sec", "real_nsec"},
"DEVICES": {"devices", "remote"},
"DEVICE": {
"activated",
"bps",
"cycle",
"mincycle",
"driver",
"flags",
"native",
"parity",
"path",
"stopbits",
"subtype",
},
# 'AIS': {} # see: http://catb.org/gpsd/AIVDM.html
'ERROR': {'message'}} # TODO: Full suite of possible GPSD output
"ERROR": {"message"},
} # TODO: Full suite of possible GPSD output
def __init__(self):
"""Potential data packages from gpsd for a generator of class attribute dictionaries"""
for package_name, dataset in self.packages.items():
_emptydict = {key: 'n/a' for key in dataset}
_emptydict = {key: "n/a" for key in dataset}
setattr(self, package_name, _emptydict)
self.DEVICES['devices'] = {key: 'n/a' for key in self.packages['DEVICE']} # How does multiple listed devices work?
self.DEVICES["devices"] = {
key: "n/a" for key in self.packages["DEVICE"]
} # How does multiple listed devices work?
# self.POLL = {'tpv': self.TPV, 'sky': self.SKY, 'time': 'n/a', 'active': 'n/a'}
def unpack(self, gpsd_socket_response):
@ -178,14 +265,24 @@ class DataStream(object):
applies to a lot of things.
"""
try:
fresh_data = json.loads(gpsd_socket_response) # The reserved word 'class' is popped from JSON object class
package_name = fresh_data.pop('class', 'ERROR') # gpsd data package errors are also 'ERROR'.
package = getattr(self, package_name, package_name) # packages are named for JSON object class
fresh_data = json.loads(
gpsd_socket_response
) # The reserved word 'class' is popped from JSON object class
package_name = fresh_data.pop(
"class", "ERROR"
) # gpsd data package errors are also 'ERROR'.
package = getattr(
self, package_name, package_name
) # packages are named for JSON object class
for key in package.keys():
package[key] = fresh_data.get(key, 'n/a') # Restores 'n/a' if key is absent in the socket response
package[key] = fresh_data.get(
key, "n/a"
) # Restores 'n/a' if key is absent in the socket response
except AttributeError: # 'str' object has no attribute 'keys'
logging.error("GPSD Parser - There is an unexpected exception in DataStream.unpack.")
logging.error(
"GPSD Parser - There is an unexpected exception in DataStream.unpack."
)
return
except (ValueError, KeyError) as error:
@ -194,14 +291,12 @@ class DataStream(object):
class GPSDAdaptor(object):
''' Connect to a GPSD instance, and pass data onto a callback function '''
""" Connect to a GPSD instance, and pass data onto a callback function """
def __init__(self,
hostname = '127.0.0.1',
port = 2947,
callback = None,
update_decimation = 30):
'''
def __init__(
self, hostname="127.0.0.1", port=2947, callback=None, update_decimation=30
):
"""
Initialize a GPSAdaptor object.
This class uses the GPSDSocket class to connect to a GPSD instance,
@ -212,7 +307,7 @@ class GPSDAdaptor(object):
port (int): GPSD listen port (default = 2947)
callback (function): Callback to pass appropriately formatted dictionary data to.
update_decimation (int): Only pass updates to the callback every X samples.
'''
"""
self.hostname = hostname
self.port = port
@ -221,14 +316,12 @@ class GPSDAdaptor(object):
self.update_decimation = update_decimation
self.update_counter = 0
self.gpsd_thread_running = False
self.gpsd_thread = None
self.start()
def start(self):
''' Start the GPSD thread '''
""" Start the GPSD thread """
if self.gpsd_thread != None:
return
else:
@ -236,21 +329,18 @@ class GPSDAdaptor(object):
self.gpsd_thread = Thread(target=self.gpsd_process_thread)
self.gpsd_thread.start()
def close(self):
''' Stop the GPSD thread. '''
""" Stop the GPSD thread. """
self.gpsd_thread_running = False
# Wait for the thread to close.
if self.gpsd_thread != None:
self.gpsd_thread.join()
def send_to_callback(self, data):
'''
"""
Send the current GPS data snapshot onto the callback function,
if one exists.
'''
"""
# Attempt to pass it onto the callback function.
if self.callback != None:
@ -260,24 +350,25 @@ class GPSDAdaptor(object):
traceback.print_exc()
logging.error("GPSD - Error Passing data to callback - %s" % str(e))
def gpsd_process_thread(self):
''' Attempt to connect to a GPSD instance, and read position information '''
""" Attempt to connect to a GPSD instance, and read position information """
while self.gpsd_thread_running:
# Attempt to connect.
_gpsd_socket = GPSDSocket()
_data_stream = DataStream()
_success = _gpsd_socket.connect(host = self.hostname, port = self.port)
_success = _gpsd_socket.connect(host=self.hostname, port=self.port)
# If we could not connect, wait and try again.
if not _success:
logging.error("GPSD - Connect failed. Waiting 10 seconds before re-trying.")
logging.error(
"GPSD - Connect failed. Waiting 10 seconds before re-trying."
)
time.sleep(10)
continue
# Start watching for data.
_gpsd_socket.watch(gpsd_protocol = 'json')
_gpsd_socket.watch(gpsd_protocol="json")
logging.info("GPSD - Connected to GPSD instance at %s" % self.hostname)
while self.gpsd_thread_running:
@ -285,8 +376,7 @@ class GPSDAdaptor(object):
# If this isn't the case, we should close the connection and re-connect.
_gpsd_data = _gpsd_socket.next(timeout=10)
if _gpsd_data == None or _gpsd_data == '':
if _gpsd_data == None or _gpsd_data == "":
logging.error("GPSD - No data received. Attempting to reconnect.")
# Break out of this loop back to the connection loop.
@ -298,24 +388,24 @@ class GPSDAdaptor(object):
# Extract the Time-Position-Velocity report.
# This will have fields as defined in: http://www.catb.org/gpsd/gpsd_json.html
_TPV = _data_stream.TPV
if _TPV['lat'] == 'n/a' or _TPV['lon'] == 'n/a':
if _TPV["lat"] == "n/a" or _TPV["lon"] == "n/a":
# No position data. Continue.
continue
else:
# Produce output data structure.
if _TPV['speed'] != 'n/a':
_speed = _TPV['speed']
if _TPV["speed"] != "n/a":
_speed = _TPV["speed"]
else:
_speed = 0.0
_gps_state = {
'type': 'GPS',
'latitude': _TPV['lat'],
'longitude': _TPV['lon'],
'altitude': _TPV['alt'],
'speed': _speed,
'valid': True
"type": "GPS",
"latitude": _TPV["lat"],
"longitude": _TPV["lon"],
"altitude": _TPV["alt"],
"speed": _speed,
"valid": True,
}
if self.update_counter % self.update_decimation == 0:
@ -323,7 +413,6 @@ class GPSDAdaptor(object):
self.update_counter += 1
# Close the GPSD connection.
try:
_gpsd_socket.close()
@ -331,21 +420,19 @@ class GPSDAdaptor(object):
logging.error("GPSD - Error when closing connection: %s" % str(e))
if __name__ == '__main__':
if __name__ == "__main__":
def print_dict(data):
print(data)
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
_gpsd = GPSDAdaptor(callback=print_dict)
time.sleep(30)
_gpsd.close()
# gpsd_socket = GPSDSocket()
# data_stream = DataStream()
# gpsd_socket.connect()
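The gpsd stream parsed above is newline-delimited JSON, with the 'class' key selecting the report type. A minimal standalone parse of one TPV line (sample values invented):

import json

_line = '{"class":"TPV","mode":3,"lat":-34.9285,"lon":138.6007,"alt":50.0,"speed":1.2}'

_report = json.loads(_line)
if _report.pop("class", "ERROR") == "TPV":
    _gps_state = {
        "type": "GPS",
        "latitude": _report.get("lat", "n/a"),
        "longitude": _report.get("lon", "n/a"),
        "altitude": _report.get("alt", "n/a"),
        "speed": _report.get("speed", 0.0),
        "valid": True,
    }
    print(_gps_state)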

View file

@ -17,6 +17,7 @@ from base64 import b64encode
from hashlib import sha256
from threading import Thread, Lock
from . import __version__ as auto_rx_version
try:
# Python 2
from Queue import Queue
@ -25,9 +26,9 @@ except ImportError:
from queue import Queue
# These get replaced out after init
url_habitat_uuids=""
url_habitat_db=""
habitat_url=""
url_habitat_uuids = ""
url_habitat_db = ""
habitat_url = ""
# CRC16 function
def crc16_ccitt(data):
@ -42,9 +43,9 @@ def crc16_ccitt(data):
str: Resultant checksum as two bytes of hexadecimal.
"""
crc16 = crcmod.predefined.mkCrcFun('crc-ccitt-false')
crc16 = crcmod.predefined.mkCrcFun("crc-ccitt-false")
# Encode to ASCII.
_data_ascii = data.encode('ascii')
_data_ascii = data.encode("ascii")
return hex(crc16(_data_ascii))[2:].upper().zfill(4)
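A worked example of the checksum step: UKHAS sentences carry a CRC computed over everything between the leading '$$' and the '*', appended as four upper-case hex digits. A sketch using the same crcmod predefined function (sentence contents invented):

import crcmod.predefined

crc16 = crcmod.predefined.mkCrcFun("crc-ccitt-false")

_sentence = "$$RS_N1234557,10,01:02:03,-34.00000,138.00000,10000,5.1,1.0,-1.0"
_checksum = hex(crc16(_sentence[2:].encode("ascii")))[2:].upper().zfill(4)
print("%s*%s" % (_sentence, _checksum))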
@ -62,56 +63,58 @@ def sonde_telemetry_to_sentence(telemetry, payload_callsign=None, comment=None):
"""
# We only want HH:MM:SS for uploading to habitat.
_short_time = telemetry['datetime_dt'].strftime("%H:%M:%S")
_short_time = telemetry["datetime_dt"].strftime("%H:%M:%S")
if payload_callsign is None:
# If we haven't been supplied a callsign, we generate one based on the serial number.
_callsign = "RS_" + telemetry['id']
_callsign = "RS_" + telemetry["id"]
else:
_callsign = payload_callsign
_sentence = "$$%s,%d,%s,%.5f,%.5f,%d,%.1f,%.1f,%.1f" % (_callsign,
telemetry['frame'],
_sentence = "$$%s,%d,%s,%.5f,%.5f,%d,%.1f,%.1f,%.1f" % (
_callsign,
telemetry["frame"],
_short_time,
telemetry['lat'],
telemetry['lon'],
int(telemetry['alt']), # Round to the nearest metre.
telemetry['vel_h'],
telemetry['temp'],
telemetry['humidity'])
telemetry["lat"],
telemetry["lon"],
int(telemetry["alt"]), # Round to the nearest metre.
telemetry["vel_h"],
telemetry["temp"],
telemetry["humidity"],
)
if 'f_centre' in telemetry:
if "f_centre" in telemetry:
# We have an estimate of the sonde's centre frequency from the modem, use this in place of
# the RX frequency.
# Round to 1 kHz
_freq = round(telemetry['f_centre']/1000.0)
_freq = round(telemetry["f_centre"] / 1000.0)
# Convert to MHz.
_freq = "%.3f MHz" % (_freq/1e3)
_freq = "%.3f MHz" % (_freq / 1e3)
else:
# Otherwise, use the normal frequency.
_freq = telemetry['freq']
_freq = telemetry["freq"]
# Add in a comment field, containing the sonde type, serial number, and frequency.
_sentence += ",%s %s %s" % (telemetry['type'], telemetry['id'], _freq)
_sentence += ",%s %s %s" % (telemetry["type"], telemetry["id"], _freq)
# Add in pressure data, if valid (not -1)
if telemetry['pressure'] > 0.0:
_sentence += " %.1fhPa" % telemetry['pressure']
if telemetry["pressure"] > 0.0:
_sentence += " %.1fhPa" % telemetry["pressure"]
# Check for Burst/Kill timer data, and add in.
if 'bt' in telemetry:
if (telemetry['bt'] != -1) and (telemetry['bt'] != 65535):
_sentence += " BT %s" % time.strftime("%H:%M:%S", time.gmtime(telemetry['bt']))
if "bt" in telemetry:
if (telemetry["bt"] != -1) and (telemetry["bt"] != 65535):
_sentence += " BT %s" % time.strftime(
"%H:%M:%S", time.gmtime(telemetry["bt"])
)
# Add in battery voltage, if the field is valid (e.g. not -1)
if telemetry['batt'] > 0.0:
_sentence += " %.1fV" % telemetry['batt']
if telemetry["batt"] > 0.0:
_sentence += " %.1fV" % telemetry["batt"]
# Add on any custom comment data if provided.
if comment != None:
comment = comment.replace(',','_')
comment = comment.replace(",", "_")
_sentence += " " + comment
_checksum = crc16_ccitt(_sentence[2:])
@ -127,6 +130,7 @@ callsign_init = False
uuids = []
def check_callsign(callsign, timeout=10):
"""
Check if a payload document exists for a given callsign.
@ -154,11 +158,14 @@ def check_callsign(callsign, timeout=10):
_r_json = _r.json()
# Read out the list of positions for the requested callsign
_positions = _r_json['positions']['position']
_positions = _r_json["positions"]["position"]
# If there is at least one position returned, we assume there is a valid payload document.
if len(_positions) > 0:
logging.info("Habitat - Callsign %s already present in Habitat DB, not creating new payload doc." % callsign)
logging.info(
"Habitat - Callsign %s already present in Habitat DB, not creating new payload doc."
% callsign
)
return True
else:
# Otherwise, we don't, and go create one.
@ -166,11 +173,13 @@ def check_callsign(callsign, timeout=10):
except Exception as e:
# Handle errors with JSON parsing.
logging.error("Habitat - Unable to request payload positions from legacy-snus.habhub.org - %s" % str(e))
logging.error(
"Habitat - Unable to request payload positions from legacy-snus.habhub.org - %s"
% str(e)
)
return False
# Keep an internal cache for which payload docs we've created so we don't spam couchdb with updates
payload_config_cache = {}
@ -179,18 +188,17 @@ def ISOStringNow():
return "%sZ" % datetime.datetime.utcnow().isoformat()
def initPayloadDoc(serial, description="Meteorology Radiosonde", frequency=401.5, timeout=20):
def initPayloadDoc(
serial, description="Meteorology Radiosonde", frequency=401.5, timeout=20
):
"""Creates a payload in Habitat for the radiosonde before uploading"""
global url_habitat_db
payload_data = {
"type": "payload_configuration",
"name": serial,
"time_created": ISOStringNow(),
"metadata": {
"description": description
},
"metadata": {"description": description},
"transmissions": [
{
"frequency": frequency,
@ -201,85 +209,64 @@ def initPayloadDoc(serial, description="Meteorology Radiosonde", frequency=401.5
"stop": 2,
"shift": 350,
"baud": 50,
"description": "DUMMY ENTRY, DATA IS VIA radiosonde_auto_rx"
"description": "DUMMY ENTRY, DATA IS VIA radiosonde_auto_rx",
}
],
"sentences": [
{
"protocol": "UKHAS",
"callsign": serial,
"checksum":"crc16-ccitt",
"fields":[
{
"name": "sentence_id",
"sensor": "base.ascii_int"
},
{
"name": "time",
"sensor": "stdtelem.time"
},
"checksum": "crc16-ccitt",
"fields": [
{"name": "sentence_id", "sensor": "base.ascii_int"},
{"name": "time", "sensor": "stdtelem.time"},
{
"name": "latitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd"
"format": "dd.dddd",
},
{
"name": "longitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd"
"format": "dd.dddd",
},
{
"name": "altitude",
"sensor": "base.ascii_int"
},
{
"name": "speed",
"sensor": "base.ascii_float"
},
{
"name": "temperature_external",
"sensor": "base.ascii_float"
},
{
"name": "humidity",
"sensor": "base.ascii_float"
},
{
"name": "comment",
"sensor": "base.string"
}
{"name": "altitude", "sensor": "base.ascii_int"},
{"name": "speed", "sensor": "base.ascii_float"},
{"name": "temperature_external", "sensor": "base.ascii_float"},
{"name": "humidity", "sensor": "base.ascii_float"},
{"name": "comment", "sensor": "base.string"},
],
"filters":
{
"filters": {
"post": [
{
"filter": "common.invalid_location_zero",
"type": "normal"
}
{"filter": "common.invalid_location_zero", "type": "normal"}
]
},
"description": "radiosonde_auto_rx to Habitat Bridge"
"description": "radiosonde_auto_rx to Habitat Bridge",
}
]
],
}
# Perform the POST request to the Habitat DB.
try:
_r = requests.post(url_habitat_db, json=payload_data, timeout=timeout)
if _r.json()['ok'] is True:
if _r.json()["ok"] is True:
logging.info("Habitat - Created a payload document for %s" % serial)
return True
else:
logging.error("Habitat - Failed to create a payload document for %s" % serial)
logging.error(
"Habitat - Failed to create a payload document for %s" % serial
)
return False
except Exception as e:
logging.error("Habitat - Failed to create a payload document for %s - %s" % (serial, str(e)))
logging.error(
"Habitat - Failed to create a payload document for %s - %s"
% (serial, str(e))
)
return False
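A usage sketch of the document-creation flow, mirroring how the uploader consults its cache before creating a payload document. The serial is invented, and url_habitat_db is assumed to have been initialised elsewhere:

import time

# Assumes initPayloadDoc and payload_config_cache as defined above.
_serial = "S1234567"  # hypothetical sonde serial

if _serial not in payload_config_cache:
    if initPayloadDoc(_serial, description="Meteorology Radiosonde", frequency=401.5):
        payload_config_cache[_serial] = time.time()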
def postListenerData(doc, timeout=10):
global uuids, url_habitat_db
# do we have at least one uuid, if not go get more
@ -288,12 +275,12 @@ def postListenerData(doc, timeout=10):
# Attempt to add UUID and time data to document.
try:
doc['_id'] = uuids.pop()
doc["_id"] = uuids.pop()
except IndexError:
logging.error("Habitat - Unable to post listener data - no UUIDs available.")
return False
doc['time_uploaded'] = ISOStringNow()
doc["time_uploaded"] = ISOStringNow()
try:
_r = requests.post(url_habitat_db, json=doc, timeout=timeout)
@ -311,11 +298,13 @@ def fetchUuids(timeout=10):
while _retries > 0:
try:
_r = requests.get(url_habitat_uuids % 10, timeout=timeout)
uuids.extend(_r.json()['uuids'])
#logging.debug("Habitat - Got UUIDs")
uuids.extend(_r.json()["uuids"])
# logging.debug("Habitat - Got UUIDs")
return
except Exception as e:
logging.error("Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e))
logging.error(
"Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e)
)
time.sleep(10)
_retries = _retries - 1
continue
@ -324,28 +313,28 @@ def fetchUuids(timeout=10):
return
def initListenerCallsign(callsign, version='', antenna=''):
def initListenerCallsign(callsign, version="", antenna=""):
doc = {
'type': 'listener_information',
'time_created' : ISOStringNow(),
'data': {
'callsign': callsign,
'antenna': antenna,
'radio': 'radiosonde_auto_rx %s' % version,
}
}
"type": "listener_information",
"time_created": ISOStringNow(),
"data": {
"callsign": callsign,
"antenna": antenna,
"radio": "radiosonde_auto_rx %s" % version,
},
}
resp = postListenerData(doc)
if resp is True:
#logging.debug("Habitat - Listener Callsign Initialized.")
# logging.debug("Habitat - Listener Callsign Initialized.")
return True
else:
logging.error("Habitat - Unable to initialize callsign.")
return False
def uploadListenerPosition(callsign, lat, lon, version='', antenna=''):
def uploadListenerPosition(callsign, lat, lon, version="", antenna=""):
""" Initializer Listener Callsign, and upload Listener Position """
# Attempt to initialize the listeners callsign
@ -356,16 +345,16 @@ def uploadListenerPosition(callsign, lat, lon, version='', antenna=''):
return False
doc = {
'type': 'listener_telemetry',
'time_created': ISOStringNow(),
'data': {
'callsign': callsign,
'chase': False,
'latitude': lat,
'longitude': lon,
'altitude': 0,
'speed': 0,
}
"type": "listener_telemetry",
"time_created": ISOStringNow(),
"data": {
"callsign": callsign,
"chase": False,
"latitude": lat,
"longitude": lon,
"altitude": 0,
"speed": 0,
},
}
# post position to habitat
@ -382,8 +371,9 @@ def uploadListenerPosition(callsign, lat, lon, version='', antenna=''):
# Habitat Uploader Class
#
class HabitatUploader(object):
'''
"""
Queued Habitat Telemetry Uploader class
This performs uploads to the Habitat servers, and also handles generation of flight documents.
@ -397,26 +387,38 @@ class HabitatUploader(object):
it is immediately emptied, to avoid upload of out-of-date packets.
Note that this uploader object is intended to handle telemetry from multiple sondes
'''
"""
# We require the following fields to be present in the incoming telemetry dictionary data
REQUIRED_FIELDS = ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'type', 'freq', 'freq_float', 'datetime_dt']
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"type",
"freq",
"freq_float",
"datetime_dt",
]
def __init__(self,
user_callsign = 'N0CALL',
station_position = (0.0,0.0,0.0),
user_antenna = "",
synchronous_upload_time = 30,
callsign_validity_threshold = 2,
upload_queue_size = 16,
upload_timeout = 10,
upload_retries = 5,
upload_retry_interval = 0.25,
user_position_update_rate = 6,
inhibit = False,
url = "http://habitat.sondehub.org/"
):
def __init__(
self,
user_callsign="N0CALL",
station_position=(0.0, 0.0, 0.0),
user_antenna="",
synchronous_upload_time=30,
callsign_validity_threshold=2,
upload_queue_size=16,
upload_timeout=10,
upload_retries=5,
upload_retry_interval=0.25,
user_position_update_rate=6,
inhibit=False,
url="http://habitat.sondehub.org/",
):
""" Initialise a Habitat Uploader object.
Args:
@ -496,8 +498,6 @@ class HabitatUploader(object):
self.timer_thread = Thread(target=self.upload_timer)
self.timer_thread.start()
def user_position_upload(self):
""" Upload the the station position to Habitat. """
if self.station_position == None:
@ -506,7 +506,13 @@ class HabitatUploader(object):
return False
if (self.station_position[0] != 0.0) or (self.station_position[1] != 0.0):
_success = uploadListenerPosition(self.user_callsign, self.station_position[0], self.station_position[1], version=auto_rx_version, antenna=self.user_antenna)
_success = uploadListenerPosition(
self.user_callsign,
self.station_position[0],
self.station_position[1],
version=auto_rx_version,
antenna=self.user_antenna,
)
self.last_user_position_upload = time.time()
return _success
else:
@ -514,41 +520,45 @@ class HabitatUploader(object):
self.last_user_position_upload = time.time()
return False
def habitat_upload(self, sentence):
''' Upload a UKHAS-standard telemetry sentence to Habitat
""" Upload a UKHAS-standard telemetry sentence to Habitat
Args:
sentence (str): The UKHAS-standard telemetry sentence to upload.
'''
"""
if self.inhibit:
self.log_info("Upload inhibited.")
return
# Generate payload to be uploaded
_sentence_b64 = b64encode(sentence.encode('ascii')) # Encode to ASCII to be able to perform B64 encoding...
_sentence_b64 = b64encode(
sentence.encode("ascii")
) # Encode to ASCII to be able to perform B64 encoding...
_date = datetime.datetime.utcnow().isoformat("T") + "Z"
_user_call = self.user_callsign
_data = {
"type": "payload_telemetry",
"data": {
"_raw": _sentence_b64.decode('ascii') # ... but decode back to a string to enable JSON serialisation.
},
"_raw": _sentence_b64.decode(
"ascii"
) # ... but decode back to a string to enable JSON serialisation.
},
"receivers": {
_user_call: {
"time_created": _date,
"time_uploaded": _date,
},
},
_user_call: {"time_created": _date, "time_uploaded": _date,},
},
}
# The URL to upload to.
_url = habitat_url + "habitat/_design/payload_telemetry/_update/add_listener/%s" % sha256(_sentence_b64).hexdigest()
_url = (
habitat_url
+ "habitat/_design/payload_telemetry/_update/add_listener/%s"
% sha256(_sentence_b64).hexdigest()
)
# Delay for a random amount of time between 0 and upload_retry_interval*2 seconds.
time.sleep(random.random()*self.upload_retry_interval*2.0)
time.sleep(random.random() * self.upload_retry_interval * 2.0)
_retries = 0
@ -560,33 +570,44 @@ class HabitatUploader(object):
# Run the request.
try:
headers = {"User-Agent": "autorx-" + auto_rx_version}
_req = requests.put(_url, data=json.dumps(_data), timeout=self.upload_timeout, headers=headers)
_req = requests.put(
_url,
data=json.dumps(_data),
timeout=self.upload_timeout,
headers=headers,
)
except Exception as e:
self.log_error("Upload Failed: %s" % str(e))
break
if _req.status_code == 201 or _req.status_code == 403:
# 201 = Success, 403 = Success, sentence has already been seen by others.
self.log_info("Uploaded sentence to Habitat successfully: %s" % sentence.strip())
self.log_info(
"Uploaded sentence to Habitat successfully: %s" % sentence.strip()
)
_upload_success = True
break
elif _req.status_code == 409:
# 409 = Upload conflict (server busy). Sleep for a moment, then retry.
self.log_debug("Upload conflict.. retrying.")
time.sleep(random.random()*self.upload_retry_interval)
time.sleep(random.random() * self.upload_retry_interval)
_retries += 1
else:
self.log_error("Error uploading to Habitat. Status Code: %d %s." % (_req.status_code, _req.text))
self.log_error(
"Error uploading to Habitat. Status Code: %d %s."
% (_req.status_code, _req.text)
)
break
if _retries == self.upload_retries:
self.log_error("Upload conflict not resolved with %d retries." % self.upload_retries)
self.log_error(
"Upload conflict not resolved with %d retries." % self.upload_retries
)
return
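For reference, the upload above can be exercised in isolation. The following sketch rebuilds the payload_telemetry document and add_listener URL for a made-up sentence and callsign, mirroring the code in habitat_upload() with the requests call omitted; the sentence and "N0CALL" are invented for illustration.

# Sketch: rebuild the payload_telemetry doc and URL for a made-up sentence.
import datetime
import json
from base64 import b64encode
from hashlib import sha256

sentence = "$$RS_S1234567,10,01:02:03,-34.00000,138.00000,10000*ABCD\n"  # invented
_sentence_b64 = b64encode(sentence.encode("ascii"))
_date = datetime.datetime.utcnow().isoformat("T") + "Z"

doc = {
    "type": "payload_telemetry",
    "data": {"_raw": _sentence_b64.decode("ascii")},
    "receivers": {"N0CALL": {"time_created": _date, "time_uploaded": _date}},
}

# The document is PUT to a URL keyed by the SHA256 of the base64-encoded sentence.
url = (
    "http://habitat.sondehub.org/"
    + "habitat/_design/payload_telemetry/_update/add_listener/%s"
    % sha256(_sentence_b64).hexdigest()
)
print(json.dumps(doc, indent=2))
print(url)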
def habitat_upload_thread(self):
''' Handle uploading of packets to Habitat '''
""" Handle uploading of packets to Habitat """
self.log_debug("Started Habitat Uploader Thread.")
@ -598,7 +619,9 @@ class HabitatUploader(object):
while not self.habitat_upload_queue.empty():
sentence = self.habitat_upload_queue.get()
self.log_warning("Uploader queue was full - possible connectivity issue.")
self.log_warning(
"Uploader queue was full - possible connectivity issue."
)
else:
# Otherwise, get the first item in the queue.
sentence = self.habitat_upload_queue.get()
@ -612,7 +635,6 @@ class HabitatUploader(object):
self.log_debug("Stopped Habitat Uploader Thread.")
def handle_telem_dict(self, telem, immediate=False):
# Try and convert it to a UKHAS sentence
try:
@ -621,13 +643,12 @@ class HabitatUploader(object):
self.log_error("Error converting telemetry to sentence - %s" % str(e))
return
_callsign = "RS_" + telem['id']
_callsign = "RS_" + telem["id"]
# Wait for the upload_lock to be available, to ensure we don't end up with
# race conditions resulting in multiple payload docs being created.
self.upload_lock.acquire()
# Habitat Payload document creation has been disabled as of 2020-03-20.
# We now use a common payload document for all radiosonde telemetry.
#
@ -645,7 +666,7 @@ class HabitatUploader(object):
# _created = True
# else:
# _created = initPayloadDoc(_callsign, description="Meteorology Radiosonde", frequency=telem['freq_float'])
# if _created:
# self.observed_payloads[telem['id']]['habitat_document'] = True
# else:
@ -654,7 +675,10 @@ class HabitatUploader(object):
# return
if immediate:
self.log_info("Performing immediate upload for first telemetry sentence of %s." % telem['id'])
self.log_info(
"Performing immediate upload for first telemetry sentence of %s."
% telem["id"]
)
self.habitat_upload(_sentence)
else:
@ -666,22 +690,20 @@ class HabitatUploader(object):
self.upload_lock.release()
def upload_timer(self):
""" Add packets to the habitat upload queue if it is time for us to upload. """
while self.timer_thread_running:
if int(time.time()) % self.synchronous_upload_time == 0:
# Time to upload!
# Time to upload!
for _id in self.observed_payloads.keys():
# If no data, continue...
if self.observed_payloads[_id]['data'].empty():
if self.observed_payloads[_id]["data"].empty():
continue
else:
# Otherwise, dump the queue and keep the latest telemetry.
while not self.observed_payloads[_id]['data'].empty():
_telem = self.observed_payloads[_id]['data'].get()
while not self.observed_payloads[_id]["data"].empty():
_telem = self.observed_payloads[_id]["data"].get()
self.handle_telem_dict(_telem)
@ -691,7 +713,6 @@ class HabitatUploader(object):
# Not yet time to upload, wait for a bit.
time.sleep(0.1)
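The trigger above fires whenever the whole-second wall-clock time is an exact multiple of synchronous_upload_time, so all stations upload in the same time slot. A tiny standalone sketch of the same trigger (the sleep(1) after triggering is an addition here, just to skip past the triggering second):

# Sketch of the synchronous-upload trigger used in upload_timer.
import time

synchronous_upload_time = 30  # seconds

while True:
    if int(time.time()) % synchronous_upload_time == 0:
        print("time to upload")
        time.sleep(1)  # skip past the triggering second
    else:
        time.sleep(0.1)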
def process_queue(self):
""" Process packets from the input queue.
@ -706,45 +727,56 @@ class HabitatUploader(object):
# Grab latest telem dictionary.
_telem = self.input_queue.get_nowait()
_id = _telem['id']
_id = _telem["id"]
if _id not in self.observed_payloads:
# We haven't seen this ID before, so create a new dictionary entry for it.
self.observed_payloads[_id] = {'count':1, 'data':Queue(), 'habitat_document': False, 'first_uploaded': False}
self.log_debug("New Payload %s. Not observed enough to allow upload." % _id)
self.observed_payloads[_id] = {
"count": 1,
"data": Queue(),
"habitat_document": False,
"first_uploaded": False,
}
self.log_debug(
"New Payload %s. Not observed enough to allow upload." % _id
)
# However, we don't yet add anything to the queue for this payload...
else:
# We have seen this payload before!
# Increment the 'seen' counter.
self.observed_payloads[_id]['count'] += 1
self.observed_payloads[_id]["count"] += 1
# If we have seen this particular ID enough times, add the data to the ID's queue.
if self.observed_payloads[_id]['count'] >= self.callsign_validity_threshold:
if (
self.observed_payloads[_id]["count"]
>= self.callsign_validity_threshold
):
# If this is the first time we have observed this payload, immediately upload the first position we got.
if self.observed_payloads[_id]['first_uploaded'] == False:
if self.observed_payloads[_id]["first_uploaded"] == False:
# Because receiving balloon telemetry appears to be a competition, immediately upload the
# first valid position received.
self.handle_telem_dict(_telem, immediate=True)
self.observed_payloads[_id]['first_uploaded'] = True
self.observed_payloads[_id]["first_uploaded"] = True
else:
# Otherwise, add the telemetry to the upload queue
self.observed_payloads[_id]['data'].put(_telem)
self.observed_payloads[_id]["data"].put(_telem)
else:
self.log_debug("Payload ID %s not observed enough to allow upload." % _id)
self.log_debug(
"Payload ID %s not observed enough to allow upload." % _id
)
# If we haven't uploaded our station position recently, re-upload it.
if (time.time() - self.last_user_position_upload) > self.user_position_update_rate*3600:
if (
time.time() - self.last_user_position_upload
) > self.user_position_update_rate * 3600:
self.user_position_upload()
time.sleep(0.1)
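The gating above is easier to see stripped of the uploader plumbing. A simplified sketch, with handle_telem_dict replaced by a plain callback and the threshold fixed at 2:

# Simplified sketch of the callsign-validity gating in process_queue.
from queue import Queue

observed = {}
threshold = 2  # stands in for callsign_validity_threshold

def accept(telem, upload):
    _id = telem["id"]
    if _id not in observed:
        # First sighting: register the ID, but queue nothing yet.
        observed[_id] = {"count": 1, "data": Queue(), "first_uploaded": False}
        return
    observed[_id]["count"] += 1
    if observed[_id]["count"] >= threshold:
        if not observed[_id]["first_uploaded"]:
            upload(telem)  # First valid position goes out immediately.
            observed[_id]["first_uploaded"] = True
        else:
            observed[_id]["data"].put(telem)  # Held for the synchronous upload.

accept({"id": "S1234567"}, print)  # registered only
accept({"id": "S1234567"}, print)  # second sighting - uploaded immediately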
def add(self, telemetry):
""" Add a dictionary of telemetry to the input queue.
@ -754,8 +786,8 @@ class HabitatUploader(object):
"""
# Discard any telemetry which is indicated to be encrypted.
if 'encrypted' in telemetry:
if telemetry['encrypted'] == True:
if "encrypted" in telemetry:
if telemetry["encrypted"] == True:
return
# Check the telemetry dictionary contains the required fields.
@ -770,14 +802,12 @@ class HabitatUploader(object):
else:
self.log_error("Processing not running, discarding.")
def update_station_position(self, lat, lon, alt):
""" Update the internal station position record. Used when determining the station position by GPSD """
self.station_position = (lat, lon, alt)
def close(self):
''' Shutdown uploader and processing threads. '''
""" Shutdown uploader and processing threads. """
self.log_debug("Waiting for threads to close...")
self.input_processing_running = False
self.timer_thread_running = False
@ -793,7 +823,6 @@ class HabitatUploader(object):
if self.input_thread is not None:
self.input_thread.join()
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -801,7 +830,6 @@ class HabitatUploader(object):
"""
logging.debug("Habitat - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -809,7 +837,6 @@ class HabitatUploader(object):
"""
logging.info("Habitat - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
@ -817,14 +844,9 @@ class HabitatUploader(object):
"""
logging.error("Habitat - %s" % line)
def log_warning(self, line):
""" Helper function to log a warning message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.warning("Habitat - %s" % line)

View file

@ -11,6 +11,7 @@ import logging
import os
import time
from threading import Thread
try:
# Python 2
from Queue import Queue
@ -19,7 +20,6 @@ except ImportError:
from queue import Queue
class TelemetryLogger(object):
""" Radiosonde Telemetry Logger Class.
@ -36,12 +36,27 @@ class TelemetryLogger(object):
FILE_ACTIVITY_TIMEOUT = 300
# We require the following fields to be present in the input telemetry dict.
REQUIRED_FIELDS = ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'humidity', 'pressure', 'type', 'freq', 'datetime_dt', 'vel_v', 'vel_h', 'heading']
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"humidity",
"pressure",
"type",
"freq",
"datetime_dt",
"vel_v",
"vel_h",
"heading",
]
LOG_HEADER = "timestamp,serial,frame,lat,lon,alt,vel_v,vel_h,heading,temp,humidity,pressure,type,freq_mhz,snr,f_error_hz,sats,batt_v,burst_timer,aux_data\n"
def __init__(self,
log_directory = "./log"):
def __init__(self, log_directory="./log"):
""" Initialise and start a sonde logger.
Args:
@ -60,13 +75,11 @@ class TelemetryLogger(object):
# Input Queue.
self.input_queue = Queue()
# Start queue processing thread.
self.input_processing_running = True
self.log_process_thread = Thread(target=self.process_queue)
self.log_process_thread.start()
def add(self, telemetry):
""" Add a dictionary of telemetry to the input queue.
@ -87,7 +100,6 @@ class TelemetryLogger(object):
else:
self.log_error("Processing not running, discarding.")
def process_queue(self):
""" Process data from the input queue, and write telemetry to log files.
"""
@ -111,7 +123,6 @@ class TelemetryLogger(object):
self.log_info("Stopped Telemetry Logger Thread.")
def telemetry_to_string(self, telemetry):
""" Convert a telemetry dictionary to a CSV string.
@ -120,58 +131,60 @@ class TelemetryLogger(object):
"""
# timestamp,serial,frame,lat,lon,alt,vel_v,vel_h,heading,temp,humidity,type,freq,other
_log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%.1f,%.1f,%.1f,%.1f,%.1f,%.1f,%s,%.3f" % (
telemetry['datetime'],
telemetry['id'],
telemetry['frame'],
telemetry['lat'],
telemetry['lon'],
telemetry['alt'],
telemetry['vel_v'],
telemetry['vel_h'],
telemetry['heading'],
telemetry['temp'],
telemetry['humidity'],
telemetry['pressure'],
telemetry['type'],
telemetry['freq_float'])
telemetry["datetime"],
telemetry["id"],
telemetry["frame"],
telemetry["lat"],
telemetry["lon"],
telemetry["alt"],
telemetry["vel_v"],
telemetry["vel_h"],
telemetry["heading"],
telemetry["temp"],
telemetry["humidity"],
telemetry["pressure"],
telemetry["type"],
telemetry["freq_float"],
)
# Other fields that may not always be present.
if 'snr' in telemetry:
_log_line += ",%.1f" % telemetry['snr']
if "snr" in telemetry:
_log_line += ",%.1f" % telemetry["snr"]
else:
_log_line += ",-99.0"
if 'f_error' in telemetry:
_log_line += ",%d" % int(telemetry['f_error'])
if "f_error" in telemetry:
_log_line += ",%d" % int(telemetry["f_error"])
else:
_log_line += ",0"
if 'sats' in telemetry:
_log_line += ",%d" % telemetry['sats']
if "sats" in telemetry:
_log_line += ",%d" % telemetry["sats"]
else:
_log_line += ",-1"
if 'batt' in telemetry:
_log_line += ",%.1f" % telemetry['batt']
if "batt" in telemetry:
_log_line += ",%.1f" % telemetry["batt"]
else:
_log_line += ",-1"
# Check for Burst/Kill timer data, and add in.
if 'bt' in telemetry:
if (telemetry['bt'] != -1) and (telemetry['bt'] != 65535):
_log_line += ",%s" % time.strftime("%H:%M:%S", time.gmtime(telemetry['bt']))
if "bt" in telemetry:
if (telemetry["bt"] != -1) and (telemetry["bt"] != 65535):
_log_line += ",%s" % time.strftime(
"%H:%M:%S", time.gmtime(telemetry["bt"])
)
else:
_log_line += ",-1"
else:
_log_line += ",-1"
# Add Aux data, if it exists.
if 'aux' in telemetry:
_log_line += ",%s" % telemetry['aux'].strip()
if "aux" in telemetry:
_log_line += ",%s" % telemetry["aux"].strip()
else:
_log_line += ",-1"
# Terminate the log line.
_log_line += "\n"
@ -184,9 +197,8 @@ class TelemetryLogger(object):
telemetry (dict): Telemetry dictionary to process.
"""
_id = telemetry['id']
_type = telemetry['type']
_id = telemetry["id"]
_type = telemetry["type"]
# If there is no log open for the current ID, check to see if there is an existing (closed) log file, and open it.
if _id not in self.open_logs:
@ -197,35 +209,39 @@ class TelemetryLogger(object):
_log_file_name = _existing_files[0]
self.log_info("Using existing log file: %s" % _log_file_name)
# Create entry in open logs dictionary
self.open_logs[_id] = {'log':open(_log_file_name,'a'), 'last_time':time.time()}
self.open_logs[_id] = {
"log": open(_log_file_name, "a"),
"last_time": time.time(),
}
else:
# Create a new log file.
_log_suffix = "%s_%s_%s_%d_sonde.log" % (
datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"),
_id,
_type,
int(telemetry['freq_float']*1e3) # Convert frequency to kHz
)
int(telemetry["freq_float"] * 1e3), # Convert frequency to kHz
)
_log_file_name = os.path.join(self.log_directory, _log_suffix)
self.log_info("Opening new log file: %s" % _log_file_name)
# Create entry in open logs dictionary
self.open_logs[_id] = {'log':open(_log_file_name,'a'), 'last_time':time.time()}
self.open_logs[_id] = {
"log": open(_log_file_name, "a"),
"last_time": time.time(),
}
# Write in a header line.
self.open_logs[_id]['log'].write(self.LOG_HEADER)
self.open_logs[_id]["log"].write(self.LOG_HEADER)
# Produce log file sentence.
_log_line = self.telemetry_to_string(telemetry)
# Write out to log.
self.open_logs[_id]['log'].write(_log_line)
self.open_logs[_id]['log'].flush()
self.open_logs[_id]["log"].write(_log_line)
self.open_logs[_id]["log"].flush()
# Update the last_time field.
self.open_logs[_id]['last_time'] = time.time()
self.open_logs[_id]["last_time"] = time.time()
self.log_debug("Wrote line: %s" % _log_line.strip())
def cleanup_logs(self):
""" Close any open logs that have not had telemetry added in X seconds. """
@ -233,22 +249,21 @@ class TelemetryLogger(object):
for _id in self.open_logs.copy().keys():
try:
if _now > (self.open_logs[_id]['last_time'] + self.FILE_ACTIVITY_TIMEOUT):
if _now > (
self.open_logs[_id]["last_time"] + self.FILE_ACTIVITY_TIMEOUT
):
# Flush and close the log file, and pop this element from the dictionary.
self.open_logs[_id]['log'].flush()
self.open_logs[_id]['log'].close()
self.open_logs[_id]["log"].flush()
self.open_logs[_id]["log"].close()
self.open_logs.pop(_id, None)
self.log_info("Closed log file for %s" % _id)
except Exception as e:
self.log_error("Error closing log for %s - %s" % (_id, str(e)))
def close(self):
""" Close input processing thread. """
self.input_processing_running = False
def running(self):
""" Check if the logging thread is running.
@ -257,7 +272,6 @@ class TelemetryLogger(object):
"""
return self.input_processing_running
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -265,7 +279,6 @@ class TelemetryLogger(object):
"""
logging.debug("Telemetry Logger - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -273,7 +286,6 @@ class TelemetryLogger(object):
"""
logging.info("Telemetry Logger - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:

View file

@ -11,6 +11,7 @@ import logging
import socket
import time
from threading import Thread
try:
# Python 2
from Queue import Queue
@ -34,18 +35,30 @@ class OziUploader(object):
"""
# We require the following fields to be present in the incoming telemetry dictionary data
REQUIRED_FIELDS = ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'type', 'freq', 'freq_float', 'datetime_dt']
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"type",
"freq",
"freq_float",
"datetime_dt",
]
# Extra fields we can pass on to other programs.
EXTRA_FIELDS = ['bt', 'humidity', 'sats', 'batt', 'snr', 'fest', 'f_centre', 'ppm']
EXTRA_FIELDS = ["bt", "humidity", "sats", "batt", "snr", "fest", "f_centre", "ppm"]
def __init__(self,
ozimux_port = None,
payload_summary_port = None,
update_rate = 5,
station = "auto_rx"
):
def __init__(
self,
ozimux_port=None,
payload_summary_port=None,
update_rate=5,
station="auto_rx",
):
""" Initialise an OziUploader Object.
Args:
@ -59,7 +72,7 @@ class OziUploader(object):
self.update_rate = update_rate
self.station = station
# Input Queue.
# Input Queue.
self.input_queue = Queue()
# Start the input queue processing thread.
@ -69,7 +82,6 @@ class OziUploader(object):
self.log_info("Started OziMux / Payload Summary Exporter")
def send_ozimux_telemetry(self, telemetry):
""" Send a packet of telemetry into the network in OziMux/OziPlotter-compatible format.
@ -78,14 +90,19 @@ class OziUploader(object):
"""
_short_time = telemetry['datetime_dt'].strftime("%H:%M:%S")
_sentence = "TELEMETRY,%s,%.5f,%.5f,%d\n" % (_short_time, telemetry['lat'], telemetry['lon'], telemetry['alt'])
_short_time = telemetry["datetime_dt"].strftime("%H:%M:%S")
_sentence = "TELEMETRY,%s,%.5f,%.5f,%d\n" % (
_short_time,
telemetry["lat"],
telemetry["lon"],
telemetry["alt"],
)
try:
_ozisock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
_ozisock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Set up socket for broadcast, and allow re-use of the address
_ozisock.setsockopt(socket.SOL_SOCKET,socket.SO_BROADCAST,1)
_ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
_ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Under OSX we also need to set SO_REUSEPORT to 1
try:
@ -94,19 +111,24 @@ class OziUploader(object):
pass
try:
_ozisock.sendto(_sentence.encode('ascii'),('<broadcast>',self.ozimux_port))
_ozisock.sendto(
_sentence.encode("ascii"), ("<broadcast>", self.ozimux_port)
)
# Catch any socket errors that may occur when attempting to send to a broadcast address
# when there is no network connected. In this case, retry and send to localhost instead.
except socket.error as e:
self.log_debug("Send to broadcast address failed, sending to localhost instead.")
_ozisock.sendto(_sentence.encode('ascii'),('127.0.0.1',self.ozimux_port))
self.log_debug(
"Send to broadcast address failed, sending to localhost instead."
)
_ozisock.sendto(
_sentence.encode("ascii"), ("127.0.0.1", self.ozimux_port)
)
_ozisock.close()
except Exception as e:
self.log_error("Failed to send OziMux packet: %s" % str(e))
def send_payload_summary(self, telemetry):
""" Send a payload summary message into the network via UDP broadcast.
@ -117,36 +139,35 @@ class OziUploader(object):
try:
# Prepare heading & speed fields, if they are provided in the incoming telemetry blob.
if 'heading' in telemetry.keys():
_heading = telemetry['heading']
if "heading" in telemetry.keys():
_heading = telemetry["heading"]
else:
_heading = -1
if 'vel_h' in telemetry.keys():
_speed = telemetry['vel_h']*3.6
if "vel_h" in telemetry.keys():
_speed = telemetry["vel_h"] * 3.6
else:
_speed = -1
# Generate 'short' time field.
_short_time = telemetry['datetime_dt'].strftime("%H:%M:%S")
_short_time = telemetry["datetime_dt"].strftime("%H:%M:%S")
packet = {
'type' : 'PAYLOAD_SUMMARY',
'station': self.station,
'callsign' : telemetry['id'],
'latitude' : telemetry['lat'],
'longitude' : telemetry['lon'],
'altitude' : telemetry['alt'],
'speed' : _speed,
'heading': _heading,
'time' : _short_time,
'comment' : 'Radiosonde',
"type": "PAYLOAD_SUMMARY",
"station": self.station,
"callsign": telemetry["id"],
"latitude": telemetry["lat"],
"longitude": telemetry["lon"],
"altitude": telemetry["alt"],
"speed": _speed,
"heading": _heading,
"time": _short_time,
"comment": "Radiosonde",
# Additional fields specifically for radiosondes
'model': telemetry['type'],
'freq': telemetry['freq'],
'temp': telemetry['temp'],
'frame': telemetry['frame']
"model": telemetry["type"],
"freq": telemetry["freq"],
"temp": telemetry["temp"],
"frame": telemetry["frame"],
}
# Add in any extra fields we may care about.
@ -154,13 +175,11 @@ class OziUploader(object):
if _field in telemetry:
packet[_field] = telemetry[_field]
# Set up our UDP socket
_s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
_s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
_s.settimeout(1)
# Set up socket for broadcast, and allow re-use of the address
_s.setsockopt(socket.SOL_SOCKET,socket.SO_BROADCAST,1)
_s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
_s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Under OSX we also need to set SO_REUSEPORT to 1
try:
@ -169,19 +188,26 @@ class OziUploader(object):
pass
try:
_s.sendto(json.dumps(packet).encode('ascii'), ('<broadcast>', self.payload_summary_port))
_s.sendto(
json.dumps(packet).encode("ascii"),
("<broadcast>", self.payload_summary_port),
)
# Catch any socket errors that may occur when attempting to send to a broadcast address
# when there is no network connected. In this case, retry and send to localhost instead.
except socket.error as e:
self.log_debug("Send to broadcast address failed, sending to localhost instead.")
_s.sendto(json.dumps(packet).encode('ascii'), ('127.0.0.1', self.payload_summary_port))
self.log_debug(
"Send to broadcast address failed, sending to localhost instead."
)
_s.sendto(
json.dumps(packet).encode("ascii"),
("127.0.0.1", self.payload_summary_port),
)
_s.close()
except Exception as e:
self.log_error("Error sending Payload Summary: %s" % str(e))
def process_queue(self):
""" Process packets from the input queue.
@ -206,9 +232,6 @@ class OziUploader(object):
time.sleep(self.update_rate)
def add(self, telemetry):
""" Add a dictionary of telemetry to the input queue.
@ -229,7 +252,6 @@ class OziUploader(object):
else:
self.log_error("Processing not running, discarding.")
def close(self):
""" Shutdown processing thread. """
self.log_debug("Waiting for processing thread to close...")
@ -238,7 +260,6 @@ class OziUploader(object):
if self.input_thread is not None:
self.input_thread.join()
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -246,7 +267,6 @@ class OziUploader(object):
"""
logging.debug("OziMux - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -254,7 +274,6 @@ class OziUploader(object):
"""
logging.info("OziMux - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:

View file

@ -21,9 +21,8 @@ except ImportError:
from queue import Queue
def read_rotator(rotctld_host='localhost', rotctld_port=4533, timeout=5):
''' Attempt to read a position from a rotctld server.
def read_rotator(rotctld_host="localhost", rotctld_port=4533, timeout=5):
""" Attempt to read a position from a rotctld server.
Args:
rotctld_host (str): Hostname of a rotctld instance.
@ -34,7 +33,7 @@ def read_rotator(rotctld_host='localhost', rotctld_port=4533, timeout=5):
list: [azimuth, elevation]
If unsuccessful:
None
'''
"""
try:
# Initialize the socket.
@ -46,15 +45,15 @@ def read_rotator(rotctld_host='localhost', rotctld_port=4533, timeout=5):
_s.connect((rotctld_host, rotctld_port))
# Send position request
_s.send(b'p\n')
_s.send(b"p\n")
# Attempt to receive reply.
_reply = _s.recv(4096)
# Split reply into lines
_fields = _reply.decode('ascii').split('\n')
_fields = _reply.decode("ascii").split("\n")
# Check for an error response, indicated by 'RPRT' in the returned line.
if 'RPRT' in _fields[0]:
if "RPRT" in _fields[0]:
logging.error("Rotator - rotctld reported error - %s" % _fields[0].strip())
return None
else:
@ -69,9 +68,10 @@ def read_rotator(rotctld_host='localhost', rotctld_port=4533, timeout=5):
return None
def set_rotator(rotctld_host='localhost', rotctld_port=4533, azimuth=0.0, elevation = 0.0, timeout=5):
''' Attempt to set the position of a rotator via a rotctld server.
def set_rotator(
rotctld_host="localhost", rotctld_port=4533, azimuth=0.0, elevation=0.0, timeout=5
):
""" Attempt to read a position from a rotctld server.
Args:
rotctld_host (str): Hostname of a rotctld instance.
@ -84,7 +84,7 @@ def set_rotator(rotctld_host='localhost', rotctld_port=4533, azimuth=0.0, elevat
True
If unsuccessful:
False
'''
"""
try:
# Initialize the socket.
@ -101,13 +101,13 @@ def set_rotator(rotctld_host='localhost', rotctld_port=4533, azimuth=0.0, elevat
# Send position set command
_cmd = "P %.1f %.1f\n" % (_az, _el)
_s.send(_cmd.encode('ascii'))
_s.send(_cmd.encode("ascii"))
# Attempt to receive reply.
_reply = _s.recv(4096).decode('ascii')
_reply = _s.recv(4096).decode("ascii")
# Check for an 'OK' response, indicated by 'RPRT 0'
if 'RPRT 0' in _reply:
if "RPRT 0" in _reply:
return True
else:
# Anything else indicates an error.
@ -128,17 +128,19 @@ class Rotator(object):
"""
# We require the following fields to be present in the input telemetry dict.
REQUIRED_FIELDS = [ 'id', 'lat', 'lon', 'alt', 'type', 'freq']
REQUIRED_FIELDS = ["id", "lat", "lon", "alt", "type", "freq"]
def __init__(self,
station_position = (0.0,0.0,0.0),
rotctld_host = 'localhost',
rotctld_port = 4533,
rotator_update_rate = 30,
rotator_update_threshold = 5.0,
rotator_homing_enabled = False,
rotator_homing_delay = 10,
rotator_home_position = [0.0,0.0]):
def __init__(
self,
station_position=(0.0, 0.0, 0.0),
rotctld_host="localhost",
rotctld_port=4533,
rotator_update_rate=30,
rotator_update_threshold=5.0,
rotator_homing_enabled=False,
rotator_homing_delay=10,
rotator_home_position=[0.0, 0.0],
):
""" Start a new Rotator Control object.
Args:
@ -168,7 +170,6 @@ class Rotator(object):
self.rotator_homing_delay = rotator_homing_delay
self.rotator_home_position = rotator_home_position
# Latest telemetry.
self.latest_telemetry = None
self.latest_telemetry_time = 0
@ -179,12 +180,11 @@ class Rotator(object):
# Start queue processing thread.
self.rotator_thread_running = True
self.rotator_thread = Thread(target = self.rotator_update_thread)
self.rotator_thread = Thread(target=self.rotator_update_thread)
self.rotator_thread.start()
self.log_info("Started Rotator Thread")
def add(self, telemetry):
""" Add a telemetery dictionary to the input queue. """
# Check the telemetry dictionary contains the required fields.
@ -201,15 +201,15 @@ class Rotator(object):
finally:
self.telem_lock.release()
def move_rotator(self, azimuth, elevation):
''' Move the rotator to a new position, if the new position
""" Move the rotator to a new position, if the new position
is further than <rotator_update_threshold> away from the current position
'''
"""
# Get current position
_pos = read_rotator(rotctld_host = self.rotctld_host,
rotctld_port = self.rotctld_port)
_pos = read_rotator(
rotctld_host=self.rotctld_host, rotctld_port=self.rotctld_port
)
# If we can't get the current position of the rotator, then we won't be able to move it either
# May as well return immediately.
@ -221,28 +221,44 @@ class Rotator(object):
_curr_az = _pos[0] % 360.0
_curr_el = _pos[1]
if (abs(azimuth-_curr_az) > self.rotator_update_threshold) or (abs(elevation-_curr_el) > self.rotator_update_threshold):
if (abs(azimuth - _curr_az) > self.rotator_update_threshold) or (
abs(elevation - _curr_el) > self.rotator_update_threshold
):
# Move to the target position.
self.log_info("New rotator target is outside current antenna view (%.1f, %.1f +/- %.1f deg), moving rotator to %.1f, %.1f" % (_curr_az, _curr_el, self.rotator_update_threshold, azimuth, elevation))
return set_rotator(rotctld_host = self.rotctld_host,
rotctld_port = self.rotctld_port,
azimuth = azimuth,
elevation = elevation)
self.log_info(
"New rotator target is outside current antenna view (%.1f, %.1f +/- %.1f deg), moving rotator to %.1f, %.1f"
% (
_curr_az,
_curr_el,
self.rotator_update_threshold,
azimuth,
elevation,
)
)
return set_rotator(
rotctld_host=self.rotctld_host,
rotctld_port=self.rotctld_port,
azimuth=azimuth,
elevation=elevation,
)
else:
# We are close enough to the target position, no need to move yet.
self.log_debug("New target is within current antenna view (%.1f, %.1f +/- %.1f deg), not moving rotator." % (_curr_az, _curr_el, self.rotator_update_threshold))
self.log_debug(
"New target is within current antenna view (%.1f, %.1f +/- %.1f deg), not moving rotator."
% (_curr_az, _curr_el, self.rotator_update_threshold)
)
return True
def home_rotator(self):
''' Move the rotator to its home position '''
""" Move the rotator to its home position """
self.log_info("Moving rotator to home position.")
self.move_rotator(azimuth = self.rotator_home_position[0],
elevation = self.rotator_home_position[1])
self.move_rotator(
azimuth=self.rotator_home_position[0],
elevation=self.rotator_home_position[1],
)
def rotator_update_thread(self):
''' Rotator updater thread '''
""" Rotator updater thread """
if self.rotator_homing_enabled:
# Move rotator to 'home' position on startup.
@ -266,40 +282,45 @@ class Rotator(object):
if _telem != None:
try:
# Check if the telemetry is very old.
_telem_age = time.time() -_telem_time
_telem_age = time.time() - _telem_time
# If the telemetry is older than our homing delay, move to our home position.
if _telem_age > self.rotator_homing_delay*60.0:
if _telem_age > self.rotator_homing_delay * 60.0:
self.home_rotator()
else:
# Check that the station position is not 0,0
if (self.station_position[0] == 0.0) and (self.station_position[1] == 0.0):
self.log_error("Station position is 0,0 - not moving rotator.")
if (self.station_position[0] == 0.0) and (
self.station_position[1] == 0.0
):
self.log_error(
"Station position is 0,0 - not moving rotator."
)
else:
# Otherwise, calculate the new azimuth/elevation.
_position = position_info(self.station_position, [_telem['lat'],_telem['lon'],_telem['alt']])
_position = position_info(
self.station_position,
[_telem["lat"], _telem["lon"], _telem["alt"]],
)
# Move to the new position
self.move_rotator(_position['bearing'], _position['elevation'])
self.move_rotator(
_position["bearing"], _position["elevation"]
)
except Exception as e:
self.log_error("Error handling new telemetry - %s" % str(e))
# Wait until the next update time.
_i = 0
while (_i < self.rotator_update_rate) and self.rotator_thread_running:
time.sleep(1)
_i += 1
def update_station_position(self, lat, lon, alt):
""" Update the internal station position record. Used when determining the station position by GPSD """
self.station_position = (lat, lon, alt)
def close(self):
""" Close input processing thread. """
self.rotator_thread_running = False
@ -309,7 +330,6 @@ class Rotator(object):
self.log_debug("Stopped rotator control thread.")
def running(self):
""" Check if the logging thread is running.
@ -318,7 +338,6 @@ class Rotator(object):
"""
return self.rotator_thread_running
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
@ -326,7 +345,6 @@ class Rotator(object):
"""
logging.debug("Rotator - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
@ -334,7 +352,6 @@ class Rotator(object):
"""
logging.info("Rotator - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
@ -343,9 +360,10 @@ class Rotator(object):
logging.error("Rotator - %s" % line)
if __name__ == '__main__':
if __name__ == "__main__":
import sys
_host = sys.argv[1]
print(read_rotator(rotctld_host = _host))
print(set_rotator(rotctld_host = _host, azimuth=0.0, elevation = 0.0))
print(read_rotator(rotctld_host=_host))
print(set_rotator(rotctld_host=_host, azimuth=0.0, elevation=0.0))
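The rotctld exchange underlying both helpers is just two line-based text commands over TCP; a condensed sketch:

# Condensed sketch of the rotctld protocol used by read_rotator/set_rotator.
import socket

def rotctld_query(cmd, host="localhost", port=4533, timeout=5):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    s.connect((host, port))
    s.send(cmd.encode("ascii"))
    reply = s.recv(4096).decode("ascii")
    s.close()
    return reply

# 'p\n' requests the current position: the reply is azimuth and elevation on
# separate lines, or an 'RPRT <code>' error.
print(rotctld_query("p\n"))
# 'P <az> <el>\n' sets a new position: 'RPRT 0' indicates success.
print(rotctld_query("P 180.0 45.0\n"))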

View file

@ -15,7 +15,14 @@ import time
import traceback
from threading import Thread, Lock
from types import FunctionType, MethodType
from .utils import detect_peaks, rtlsdr_test, reset_rtlsdr_by_serial, reset_all_rtlsdrs, peak_decimation
from .utils import (
detect_peaks,
rtlsdr_test,
reset_rtlsdr_by_serial,
reset_all_rtlsdrs,
peak_decimation,
)
try:
# Python 2
from StringIO import StringIO
@ -31,10 +38,30 @@ except ImportError:
print("Running in a test scenario, no data emitted to flask.")
pass
# Global for latest scan result
scan_result = {'freq':[], 'power':[], 'peak_freq':[], 'peak_lvl':[], 'timestamp':'No data yet.', 'threshold':0}
def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_power='rtl_power', device_idx = 0, ppm = 0, gain = -1, bias = False):
# Global for latest scan result
scan_result = {
"freq": [],
"power": [],
"peak_freq": [],
"peak_lvl": [],
"timestamp": "No data yet.",
"threshold": 0,
}
def run_rtl_power(
start,
stop,
step,
filename="log_power.csv",
dwell=20,
sdr_power="rtl_power",
device_idx=0,
ppm=0,
gain=-1,
bias=False,
):
""" Capture spectrum data using rtl_power (or drop-in equivalent), and save to a file.
Args:
@ -60,9 +87,9 @@ def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_p
# Add a gain parameter if we have been provided one.
if gain != -1:
gain_param = '-g %.1f ' % gain
gain_param = "-g %.1f " % gain
else:
gain_param = ''
gain_param = ""
# If the output log file exists, remove it.
if os.path.exists(filename):
@ -71,45 +98,68 @@ def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_p
# Add -k 30 option, to SIGKILL rtl_power 30 seconds after the regular timeout expires.
# Note that this only works with the GNU Coreutils version of Timeout, not the IBM version,
# which is provided with OSX (Darwin).
if 'Darwin' in platform.platform():
timeout_kill = ''
if "Darwin" in platform.platform():
timeout_kill = ""
else:
timeout_kill = '-k 30 '
timeout_kill = "-k 30 "
rtl_power_cmd = "timeout %s%d %s %s-f %d:%d:%d -i %d -1 -c 20%% -p %d -d %s %s%s" % (
timeout_kill,
dwell+10,
sdr_power,
bias_option,
start,
stop,
step,
dwell,
int(ppm), # Should this be an int?
str(device_idx),
gain_param,
filename)
rtl_power_cmd = (
"timeout %s%d %s %s-f %d:%d:%d -i %d -1 -c 20%% -p %d -d %s %s%s"
% (
timeout_kill,
dwell + 10,
sdr_power,
bias_option,
start,
stop,
step,
dwell,
int(ppm), # Should this be an int?
str(device_idx),
gain_param,
filename,
)
)
logging.info("Scanner #%s - Running frequency scan." % str(device_idx))
logging.debug("Scanner #%s - Running command: %s" % (str(device_idx), rtl_power_cmd))
logging.debug(
"Scanner #%s - Running command: %s" % (str(device_idx), rtl_power_cmd)
)
try:
_output = subprocess.check_output(rtl_power_cmd, shell=True, stderr=subprocess.STDOUT)
_output = subprocess.check_output(
rtl_power_cmd, shell=True, stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
# Something went wrong...
logging.critical("Scanner #%s - rtl_power call failed with return code %s." % (str(device_idx), e.returncode))
logging.critical(
"Scanner #%s - rtl_power call failed with return code %s."
% (str(device_idx), e.returncode)
)
# Look at the error output in a bit more details.
_output = e.output.decode('ascii')
if 'No supported devices found' in _output:
logging.critical("Scanner #%s - rtl_power could not find device with ID %s, is your configuration correct?" % (str(device_idx), str(device_idx)))
elif 'illegal option' in _output:
_output = e.output.decode("ascii")
if "No supported devices found" in _output:
logging.critical(
"Scanner #%s - rtl_power could not find device with ID %s, is your configuration correct?"
% (str(device_idx), str(device_idx))
)
elif "illegal option" in _output:
if bias:
logging.critical("Scanner #%s - rtl_power reported an illegal option was used. Are you using a rtl_power version with bias tee support?" % str(device_idx))
logging.critical(
"Scanner #%s - rtl_power reported an illegal option was used. Are you using a rtl_power version with bias tee support?"
% str(device_idx)
)
else:
logging.critical("Scanner #%s - rtl_power reported an illegal option was used. (This shouldn't happen... are you running an ancient version?)" % str(device_idx))
logging.critical(
"Scanner #%s - rtl_power reported an illegal option was used. (This shouldn't happen... are you running an ancient version?)"
% str(device_idx)
)
else:
# Something else odd happened, dump the entire error output to the log for further analysis.
logging.critical("Scanner #%s - rtl_power reported error: %s" % (str(device_idx),_output))
logging.critical(
"Scanner #%s - rtl_power reported error: %s"
% (str(device_idx), _output)
)
return False
else:
@ -117,7 +167,6 @@ def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_p
return True
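For illustration, with the default arguments (400-403 MHz, 800 Hz step, 20 s dwell, device 0, hardware AGC, no bias tee) the assembled command comes out roughly as:

# timeout -k 30 30 rtl_power -f 400000000:403000000:800 -i 20 -1 -c 20% -p 0 -d 0 log_power.csv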
def read_rtl_power(filename):
""" Read in frequency samples from a single-shot log file produced by rtl_power
@ -138,20 +187,23 @@ def read_rtl_power(filename):
freq_step = 0
# Open file.
f = open(filename,'r')
f = open(filename, "r")
# rtl_power log files are csv's, with the first 6 fields in each line describing the time and frequency scan parameters
# for the remaining fields, which contain the power samples.
# for the remaining fields, which contain the power samples.
for line in f:
# Split line into fields.
fields = line.split(',')
fields = line.split(",")
if len(fields) < 6:
logging.error("Scanner - Invalid number of samples in input file - corrupt?")
raise Exception("Scanner - Invalid number of samples in input file - corrupt?")
logging.error(
"Scanner - Invalid number of samples in input file - corrupt?"
)
raise Exception(
"Scanner - Invalid number of samples in input file - corrupt?"
)
start_date = fields[0]
start_time = fields[1]
@ -160,9 +212,9 @@ def read_rtl_power(filename):
freq_step = float(fields[4])
n_samples = int(fields[5])
#freq_range = np.arange(start_freq,stop_freq,freq_step)
samples = np.loadtxt(StringIO(",".join(fields[6:])),delimiter=',')
freq_range = np.linspace(start_freq,stop_freq,len(samples))
# freq_range = np.arange(start_freq,stop_freq,freq_step)
samples = np.loadtxt(StringIO(",".join(fields[6:])), delimiter=",")
freq_range = np.linspace(start_freq, stop_freq, len(samples))
# Add frequency range and samples to output buffers.
freq = np.append(freq, freq_range)
@ -176,7 +228,18 @@ def read_rtl_power(filename):
return (freq, power, freq_step)
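Each line of the single-shot log follows the rtl_power CSV convention: date, time, start/stop/step frequencies, sample count, then the power bins. A minimal illustration of the parsing above, with invented values:

# Invented rtl_power CSV line and the core of the parsing performed above.
import numpy as np
from io import StringIO

line = "2020-12-18, 00:00:00, 400000000, 403000000, 1500000, 3, -70.1, -69.8, -70.3"
fields = line.split(",")
samples = np.loadtxt(StringIO(",".join(fields[6:])), delimiter=",")
freq_range = np.linspace(float(fields[2]), float(fields[3]), len(samples))
print(freq_range)  # [4.000e+08 4.015e+08 4.030e+08]
print(samples)     # [-70.1 -69.8 -70.3]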
def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device_idx=0, ppm=0, gain=-1, bias=False, save_detection_audio = False, ngp_tweak = False):
def detect_sonde(
frequency,
rs_path="./",
dwell_time=10,
sdr_fm="rtl_fm",
device_idx=0,
ppm=0,
gain=-1,
bias=False,
save_detection_audio=False,
ngp_tweak=False,
):
""" Receive some FM and attempt to detect the presence of a radiosonde.
Args:
@ -216,14 +279,14 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
# Add a gain parameter if we have been provided one.
if gain != -1:
gain_param = '-g %.1f ' % gain
gain_param = "-g %.1f " % gain
else:
gain_param = ''
gain_param = ""
# Adjust the detection bandwidth based on the band the scanning is occurring in.
if frequency < 1000e6:
# 400-406 MHz sondes - use a 22 kHz detection bandwidth.
_mode = 'IQ'
_mode = "IQ"
_iq_bw = 48000
_if_bw = 20
else:
@ -234,32 +297,64 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
# to narrow this if only RS92-NGPs are expected.
if ngp_tweak:
# RS92-NGP detection
_mode = 'IQ'
_mode = "IQ"
_iq_bw = 48000
_if_bw = 32
else:
# LMS6-1680 Detection
_mode = 'FM'
_mode = "FM"
_rx_bw = 200000
if _mode == 'IQ':
if _mode == "IQ":
# IQ decoding
# Sample source (rtl_fm, in IQ mode)
rx_test_command = "timeout %ds %s %s-p %d -d %s %s-M raw -F9 -s %d -f %d 2>/dev/null |" % (dwell_time*2, sdr_fm, bias_option, int(ppm), str(device_idx), gain_param, _iq_bw, frequency)
rx_test_command = (
"timeout %ds %s %s-p %d -d %s %s-M raw -F9 -s %d -f %d 2>/dev/null |"
% (
dwell_time * 2,
sdr_fm,
bias_option,
int(ppm),
str(device_idx),
gain_param,
_iq_bw,
frequency,
)
)
# Saving of Debug audio, if enabled,
if save_detection_audio:
rx_test_command += "tee detect_%s.raw | " % str(device_idx)
rx_test_command += os.path.join(rs_path,"dft_detect") + " -t %d --iq --bw %d --dc - %d 16 2>/dev/null" % (dwell_time, _if_bw, _iq_bw)
elif _mode == 'FM':
rx_test_command += os.path.join(
rs_path, "dft_detect"
) + " -t %d --iq --bw %d --dc - %d 16 2>/dev/null" % (
dwell_time,
_if_bw,
_iq_bw,
)
elif _mode == "FM":
# FM decoding
# Sample Source (rtl_fm)
rx_test_command = "timeout %ds %s %s-p %d -d %s %s-M fm -F9 -s %d -f %d 2>/dev/null |" % (dwell_time*2, sdr_fm, bias_option, int(ppm), str(device_idx), gain_param, _rx_bw, frequency)
rx_test_command = (
"timeout %ds %s %s-p %d -d %s %s-M fm -F9 -s %d -f %d 2>/dev/null |"
% (
dwell_time * 2,
sdr_fm,
bias_option,
int(ppm),
str(device_idx),
gain_param,
_rx_bw,
frequency,
)
)
# Sample filtering
rx_test_command += "sox -t raw -r %d -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null | " % _rx_bw
rx_test_command += (
"sox -t raw -r %d -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null | "
% _rx_bw
)
# Saving of Debug audio, if enabled,
if save_detection_audio:
@ -267,18 +362,24 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
# Sample decoding / detection
# Note that we detect for dwell_time seconds, and timeout after dwell_time*2, to catch if no samples are being passed through.
rx_test_command += os.path.join(rs_path,"dft_detect") + " -t %d 2>/dev/null" % dwell_time
rx_test_command += (
os.path.join(rs_path, "dft_detect") + " -t %d 2>/dev/null" % dwell_time
)
logging.debug("Scanner #%s - Using detection command: %s" % (str(device_idx), rx_test_command))
logging.debug("Scanner #%s - Attempting sonde detection on %.3f MHz" % (str(device_idx), frequency/1e6))
logging.debug(
"Scanner #%s - Using detection command: %s" % (str(device_idx), rx_test_command)
)
logging.debug(
"Scanner #%s - Attempting sonde detection on %.3f MHz"
% (str(device_idx), frequency / 1e6)
)
try:
FNULL = open(os.devnull, 'w')
FNULL = open(os.devnull, "w")
_start = time.time()
ret_output = subprocess.check_output(rx_test_command, shell=True, stderr=FNULL)
FNULL.close()
ret_output = ret_output.decode('utf8')
ret_output = ret_output.decode("utf8")
except subprocess.CalledProcessError as e:
# dft_detect returns a code of 1 if no sonde is detected.
# logging.debug("Scanner - dfm_detect return code: %s" % e.returncode)
@ -287,33 +388,39 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
raise IOError("Possible RTLSDR lockup.")
elif e.returncode >= 2:
ret_output = e.output.decode('utf8')
ret_output = e.output.decode("utf8")
else:
_runtime = time.time() - _start
logging.debug("Scanner #%s - dft_detect exited in %.1f seconds with return code %d." % (str(device_idx), _runtime, e.returncode))
logging.debug(
"Scanner #%s - dft_detect exited in %.1f seconds with return code %d."
% (str(device_idx), _runtime, e.returncode)
)
return (None, 0.0)
except Exception as e:
# Something broke when running the detection function.
logging.error("Scanner #%s - Error when running dft_detect - %s" % (str(device_idx), str(e)))
logging.error(
"Scanner #%s - Error when running dft_detect - %s"
% (str(device_idx), str(e))
)
return (None, 0.0)
_runtime = time.time() - _start
logging.debug("Scanner - dft_detect exited in %.1f seconds with return code 1." % _runtime)
logging.debug(
"Scanner - dft_detect exited in %.1f seconds with return code 1." % _runtime
)
# Check for no output from dft_detect.
if ret_output is None or ret_output == "":
#logging.error("Scanner - dft_detect returned no output?")
# logging.error("Scanner - dft_detect returned no output?")
return (None, 0.0)
# Split the line into sonde type and correlation score.
_fields = ret_output.split(':')
_fields = ret_output.split(":")
if len(_fields) <2:
logging.error("Scanner - malformed output from dft_detect: %s" % ret_output.strip())
if len(_fields) < 2:
logging.error(
"Scanner - malformed output from dft_detect: %s" % ret_output.strip()
)
return (None, 0.0)
_type = _fields[0]
@ -321,66 +428,99 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
# Detect any frequency correction information:
try:
if ',' in _score:
_offset_est = float(_score.split(',')[1].split('Hz')[0].strip())
_score = float(_score.split(',')[0].strip())
if "," in _score:
_offset_est = float(_score.split(",")[1].split("Hz")[0].strip())
_score = float(_score.split(",")[0].strip())
else:
_score = float(_score.strip())
_offset_est = 0.0
except Exception as e:
logging.error("Scanner - Error parsing dft_detect output: %s" % ret_output.strip())
logging.error(
"Scanner - Error parsing dft_detect output: %s" % ret_output.strip()
)
return (None, 0.0)
_sonde_type = None
if 'RS41' in _type:
logging.debug("Scanner #%s - Detected a RS41! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
if "RS41" in _type:
logging.debug(
"Scanner #%s - Detected a RS41! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "RS41"
elif 'RS92' in _type:
logging.debug("Scanner #%s - Detected a RS92! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "RS92" in _type:
logging.debug(
"Scanner #%s - Detected a RS92! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "RS92"
elif 'DFM' in _type:
logging.debug("Scanner #%s - Detected a DFM Sonde! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "DFM" in _type:
logging.debug(
"Scanner #%s - Detected a DFM Sonde! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "DFM"
elif 'M10' in _type:
logging.debug("Scanner #%s - Detected a M10 Sonde! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "M10" in _type:
logging.debug(
"Scanner #%s - Detected a M10 Sonde! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "M10"
elif 'M20' in _type:
logging.debug("Scanner #%s - Detected a M20 Sonde! (Not yet supported...) (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "M20" in _type:
logging.debug(
"Scanner #%s - Detected a M20 Sonde! (Not yet supported...) (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "M20"
elif 'IMET4' in _type:
logging.debug("Scanner #%s - Detected a iMet-4 Sonde! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "IMET4" in _type:
logging.debug(
"Scanner #%s - Detected a iMet-4 Sonde! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "IMET"
elif 'IMET1' in _type:
logging.debug("Scanner #%s - Detected a iMet Sonde! (Type %s - Unsupported) (Score: %.2f)" % (str(device_idx), _type, _score))
elif "IMET1" in _type:
logging.debug(
"Scanner #%s - Detected a iMet Sonde! (Type %s - Unsupported) (Score: %.2f)"
% (str(device_idx), _type, _score)
)
_sonde_type = "IMET1"
elif 'LMS6' in _type:
logging.debug("Scanner #%s - Detected a LMS6 Sonde! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "LMS6" in _type:
logging.debug(
"Scanner #%s - Detected a LMS6 Sonde! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
_sonde_type = "LMS6"
elif 'C34' in _type:
logging.debug("Scanner #%s - Detected a Meteolabor C34/C50 Sonde! (Not yet supported...) (Score: %.2f)" % (str(device_idx), _score))
elif "C34" in _type:
logging.debug(
"Scanner #%s - Detected a Meteolabor C34/C50 Sonde! (Not yet supported...) (Score: %.2f)"
% (str(device_idx), _score)
)
_sonde_type = "C34C50"
elif 'MK2LMS' in _type:
logging.debug("Scanner #%s - Detected a 1680 MHz LMS6 Sonde (MK2A Telemetry)! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
elif "MK2LMS" in _type:
logging.debug(
"Scanner #%s - Detected a 1680 MHz LMS6 Sonde (MK2A Telemetry)! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
if _score < 0:
_sonde_type = '-MK2LMS'
_sonde_type = "-MK2LMS"
else:
_sonde_type = 'MK2LMS'
elif 'MEISEI' in _type:
logging.debug("Scanner #%s - Detected a Meisei Sonde! (Score: %.2f, Offset: %.1f Hz)" % (str(device_idx), _score, _offset_est))
_sonde_type = "MK2LMS"
elif "MEISEI" in _type:
logging.debug(
"Scanner #%s - Detected a Meisei Sonde! (Score: %.2f, Offset: %.1f Hz)"
% (str(device_idx), _score, _offset_est)
)
# Not currently sure if we expect to see inverted Meisei sondes.
if _score < 0:
_sonde_type = '-MEISEI'
_sonde_type = "-MEISEI"
else:
_sonde_type = 'MEISEI'
_sonde_type = "MEISEI"
else:
_sonde_type = None
return (_sonde_type, _offset_est)
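The parsing above assumes dft_detect emits a type name, a colon, then a correlation score with an optional frequency-offset estimate. A hypothetical output line and the same parsing in isolation:

# Hypothetical dft_detect output and the offset/score parsing used above.
ret_output = "RS41: 0.85, -1250.0 Hz"
_type, _score = ret_output.split(":")
if "," in _score:
    _offset_est = float(_score.split(",")[1].split("Hz")[0].strip())
    _score = float(_score.split(",")[0].strip())
else:
    _score = float(_score.strip())
    _offset_est = 0.0
print(_type, _score, _offset_est)  # RS41 0.85 -1250.0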
#
# Radiosonde Scanner Class
#
@ -392,34 +532,36 @@ class SondeScanner(object):
# Allow up to X consecutive scan errors before giving up.
SONDE_SCANNER_MAX_ERRORS = 5
def __init__(self,
callback = None,
auto_start = True,
min_freq = 400.0,
max_freq = 403.0,
search_step = 800.0,
whitelist = [],
greylist = [],
blacklist = [],
snr_threshold = 10,
min_distance = 1000,
quantization = 10000,
scan_dwell_time = 20,
detect_dwell_time = 5,
scan_delay = 10,
max_peaks = 10,
scan_check_interval = 10,
rs_path = "./",
sdr_power = "rtl_power",
sdr_fm = "rtl_fm",
device_idx = 0,
gain = -1,
ppm = 0,
bias = False,
save_detection_audio = False,
temporary_block_list = {},
temporary_block_time = 60,
ngp_tweak = False):
def __init__(
self,
callback=None,
auto_start=True,
min_freq=400.0,
max_freq=403.0,
search_step=800.0,
whitelist=[],
greylist=[],
blacklist=[],
snr_threshold=10,
min_distance=1000,
quantization=10000,
scan_dwell_time=20,
detect_dwell_time=5,
scan_delay=10,
max_peaks=10,
scan_check_interval=10,
rs_path="./",
sdr_power="rtl_power",
sdr_fm="rtl_fm",
device_idx=0,
gain=-1,
ppm=0,
bias=False,
save_detection_audio=False,
temporary_block_list={},
temporary_block_time=60,
ngp_tweak=False,
):
""" Initialise a Sonde Scanner Object.
Apologies for the huge number of args...
@ -490,10 +632,13 @@ class SondeScanner(object):
self.temporary_block_time = temporary_block_time
# Alert the user if there are temporary blocks in place.
if len(self.temporary_block_list.keys())>0:
self.log_info("Temporary blocks in place for frequencies: %s" % str(self.temporary_block_list.keys()))
if len(self.temporary_block_list.keys()) > 0:
self.log_info(
"Temporary blocks in place for frequencies: %s"
% str(self.temporary_block_list.keys())
)
# Error counter.
# Error counter.
self.error_retries = 0
# Count how many scans we have performed.
@ -528,7 +673,6 @@ class SondeScanner(object):
else:
self.log_warning("Sonde scan already running!")
def send_to_callback(self, results):
""" Send scan results to a callback.
@ -544,7 +688,6 @@ class SondeScanner(object):
except Exception as e:
self.log_error("Error handling scan results - %s" % str(e))
def scan_loop(self):
""" Continually perform scans, and pass any results onto the callback function """
@ -553,7 +696,9 @@ class SondeScanner(object):
# If we have hit the maximum number of permissible errors, quit.
if self.error_retries > self.SONDE_SCANNER_MAX_ERRORS:
self.log_error("Exceeded maximum number of consecutive RTLSDR errors. Closing scan thread.")
self.log_error(
"Exceeded maximum number of consecutive RTLSDR errors. Closing scan thread."
)
break
# If we are using a whitelist, we don't have an easy way of checking the RTLSDR
@ -564,7 +709,9 @@ class SondeScanner(object):
self.log_debug("Performing periodic check of RTLSDR.")
_rtlsdr_ok = rtlsdr_test(self.device_idx)
if not _rtlsdr_ok:
self.log_error("Unrecoverable RTLSDR error. Closing scan thread.")
self.log_error(
"Unrecoverable RTLSDR error. Closing scan thread."
)
break
try:
@ -572,11 +719,11 @@ class SondeScanner(object):
except (IOError, ValueError) as e:
# No log file produced. Reset the RTLSDR and try again.
#traceback.print_exc()
# traceback.print_exc()
self.log_warning("RTLSDR produced no output... resetting and retrying.")
self.error_retries += 1
# Attempt to reset the RTLSDR.
if self.device_idx == '0':
if self.device_idx == "0":
# If the device ID is 0, we assume we only have a single RTLSDR on this system.
reset_all_rtlsdrs()
else:
@ -596,14 +743,10 @@ class SondeScanner(object):
# Sleep before starting the next scan.
time.sleep(self.scan_delay)
self.log_info("Scanner Thread Closed.")
self.sonde_scanner_running = False
def sonde_search(self,
first_only = False):
def sonde_search(self, first_only=False):
""" Perform a frequency scan across a defined frequency range, and test each detected peak for the presence of a radiosonde.
In order, this function:
@ -626,10 +769,11 @@ class SondeScanner(object):
_search_results = []
if len(self.whitelist) == 0 :
if len(self.whitelist) == 0:
# No whitelist frequencies provided - perform a scan.
run_rtl_power(self.min_freq*1e6,
self.max_freq*1e6,
run_rtl_power(
self.min_freq * 1e6,
self.max_freq * 1e6,
self.search_step,
filename="log_power_%s.csv" % self.device_idx,
dwell=self.scan_dwell_time,
@ -637,7 +781,8 @@ class SondeScanner(object):
device_idx=self.device_idx,
ppm=self.ppm,
gain=self.gain,
bias=self.bias)
bias=self.bias,
)
# Exit opportunity.
if self.sonde_scanner_running == False:
@ -647,33 +792,37 @@ class SondeScanner(object):
# This step will throw an IOError if the file does not exist.
(freq, power, step) = read_rtl_power("log_power_%s.csv" % self.device_idx)
# Sanity check results.
if step == 0 or len(freq)==0 or len(power)==0:
if step == 0 or len(freq) == 0 or len(power) == 0:
# Otherwise, if a file has been written but contains no data, it can indicate
# an issue with the RTLSDR. Sometimes these issues can be resolved by issuing a usb reset to the RTLSDR.
raise ValueError("Invalid Log File")
# Update the global scan result
(_freq_decimate, _power_decimate) = peak_decimation(freq/1e6, power, 10)
scan_result['freq'] = list(_freq_decimate)
scan_result['power'] = list(_power_decimate)
scan_result['timestamp'] = datetime.datetime.utcnow().isoformat()
scan_result['peak_freq'] = []
scan_result['peak_lvl'] = []
(_freq_decimate, _power_decimate) = peak_decimation(freq / 1e6, power, 10)
scan_result["freq"] = list(_freq_decimate)
scan_result["power"] = list(_power_decimate)
scan_result["timestamp"] = datetime.datetime.utcnow().isoformat()
scan_result["peak_freq"] = []
scan_result["peak_lvl"] = []
# Rough approximation of the noise floor of the received power spectrum.
power_nf = np.mean(power)
# Pass the threshold data to the web client for plotting
scan_result['threshold'] = power_nf
scan_result["threshold"] = power_nf
# Detect peaks.
peak_indices = detect_peaks(power, mph=(power_nf+self.snr_threshold), mpd=(self.min_distance/step), show = False)
peak_indices = detect_peaks(
power,
mph=(power_nf + self.snr_threshold),
mpd=(self.min_distance / step),
show=False,
)
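# Worked numbers for the call above, using the defaults from __init__ (the
# noise floor value is assumed for illustration): with a measured noise floor
# of, say, -20 dB and snr_threshold = 10, only bins above mph = -10 dB qualify
# as peaks, and min_distance = 1000 Hz over an 800 Hz bin step gives
# mpd = 1000 / 800 = 1.25 bins as the minimum peak separation.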
# If we have found no peaks, and no greylist has been provided, re-scan.
if (len(peak_indices) == 0) and (len(self.greylist) == 0):
self.log_debug("No peaks found.")
# Emit a notification to the client that a scan is complete.
flask_emit_event('scan_event')
flask_emit_event("scan_event")
return []
# Sort peaks by power.
@ -682,45 +831,60 @@ class SondeScanner(object):
peak_frequencies = peak_freqs[np.argsort(peak_powers)][::-1]
# Quantize to nearest x Hz
peak_frequencies = np.round(peak_frequencies/self.quantization)*self.quantization
peak_frequencies = (
np.round(peak_frequencies / self.quantization) * self.quantization
)
# Remove any duplicate entries after quantization, but preserve order.
_, peak_idx = np.unique(peak_frequencies, return_index=True)
peak_frequencies = peak_frequencies[np.sort(peak_idx)]
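# A worked example of the quantization step above (peak frequency assumed for
# illustration): with the default quantization of 10 kHz, a peak detected at
# 402.0137 MHz snaps to the nearest bin:
#     np.round(402013700.0 / 10000) * 10000  ->  402010000.0  (402.010 MHz)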
# Blacklist & Temporary block list behaviour change as of v1.2.3
# Was: peak_frequencies==_frequency (This only matched an exact frequency in the blacklist)
# Now (1.2.3): Block if the peak frequency is within +/-quantization/2.0 of a blacklist or blocklist frequency.
# Remove any frequencies in the blacklist.
for _frequency in np.array(self.blacklist)*1e6:
_index = np.argwhere(np.abs(peak_frequencies-_frequency) < (self.quantization/2.0))
for _frequency in np.array(self.blacklist) * 1e6:
_index = np.argwhere(
np.abs(peak_frequencies - _frequency) < (self.quantization / 2.0)
)
peak_frequencies = np.delete(peak_frequencies, _index)
# Limit to the user-defined number of peaks to search over.
if len(peak_frequencies) > self.max_peaks:
peak_frequencies = peak_frequencies[:self.max_peaks]
peak_frequencies = peak_frequencies[: self.max_peaks]
# Append on any frequencies in the supplied greylist
peak_frequencies = np.append(np.array(self.greylist)*1e6, peak_frequencies)
peak_frequencies = np.append(
np.array(self.greylist) * 1e6, peak_frequencies
)
# Remove any frequencies in the temporary block list
self.temporary_block_list_lock.acquire()
for _frequency in self.temporary_block_list.copy().keys():
# Check the time the block was added.
if self.temporary_block_list[_frequency] > (time.time()-self.temporary_block_time*60):
if self.temporary_block_list[_frequency] > (
time.time() - self.temporary_block_time * 60
):
# We should still be blocking this frequency, so remove any peaks with this frequency.
_index = np.argwhere(np.abs(peak_frequencies-_frequency) < (self.quantization/2.0))
_index = np.argwhere(
np.abs(peak_frequencies - _frequency)
< (self.quantization / 2.0)
)
peak_frequencies = np.delete(peak_frequencies, _index)
if len(_index) > 0:
self.log_debug("Peak on %.3f MHz was removed due to temporary block." % (_frequency/1e6))
self.log_debug(
"Peak on %.3f MHz was removed due to temporary block."
% (_frequency / 1e6)
)
else:
# This frequency doesn't need to be blocked any more, remove it from the block list.
self.temporary_block_list.pop(_frequency)
self.log_info("Removed %.3f MHz from temporary block list." % (_frequency/1e6))
self.log_info(
"Removed %.3f MHz from temporary block list."
% (_frequency / 1e6)
)
self.temporary_block_list_lock.release()
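# Worked example of the expiry check above (times assumed): with the default
# temporary_block_time of 60 minutes, a frequency blocked at t = 1000 s stays
# blocked while time.time() < 1000 + 60 * 60, and is dropped from the list on
# the first scan pass after that.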
@ -731,34 +895,45 @@ class SondeScanner(object):
for _peak in peak_frequencies:
try:
# Find the index of the peak within our decimated frequency array.
_peak_power_idx = np.argmin(np.abs(scan_result['freq']-_peak/1e6))
_peak_power_idx = np.argmin(
np.abs(scan_result["freq"] - _peak / 1e6)
)
# Because we've decimated the freq & power data, the peak location may
# not be exactly at this frequency, so we take the maximum of an area
# around this location.
_peak_search_min = max(0,_peak_power_idx-5)
_peak_search_max = min(len(scan_result['freq'])-1, _peak_power_idx+5)
_peak_search_min = max(0, _peak_power_idx - 5)
_peak_search_max = min(
len(scan_result["freq"]) - 1, _peak_power_idx + 5
)
# Grab the maximum value, and append it and the frequency to the output arrays
_peak_lvl.append(max(scan_result['power'][_peak_search_min:_peak_search_max]))
_peak_freq.append(_peak/1e6)
_peak_lvl.append(
max(scan_result["power"][_peak_search_min:_peak_search_max])
)
_peak_freq.append(_peak / 1e6)
except:
pass
# Add the peak results to our global scan result dictionary.
scan_result['peak_freq'] = _peak_freq
scan_result['peak_lvl'] = _peak_lvl
scan_result["peak_freq"] = _peak_freq
scan_result["peak_lvl"] = _peak_lvl
# Tell the web client we have new data.
flask_emit_event('scan_event')
flask_emit_event("scan_event")
if len(peak_frequencies) == 0:
self.log_debug("No peaks found after blacklist frequencies removed.")
return []
else:
self.log_info("Detected peaks on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6)))
self.log_info(
"Detected peaks on %d frequencies (MHz): %s"
% (len(peak_frequencies), str(peak_frequencies / 1e6))
)
else:
# We have been provided a whitelist - scan through the supplied frequencies.
peak_frequencies = np.array(self.whitelist)*1e6
self.log_info("Scanning on whitelist frequencies (MHz): %s" % str(peak_frequencies/1e6))
peak_frequencies = np.array(self.whitelist) * 1e6
self.log_info(
"Scanning on whitelist frequencies (MHz): %s"
% str(peak_frequencies / 1e6)
)
# Run rs_detect on each peak frequency, to determine if there is a sonde there.
for freq in peak_frequencies:
@ -769,19 +944,20 @@ class SondeScanner(object):
if self.sonde_scanner_running == False:
return []
(detected, offset_est) = detect_sonde(_freq,
(detected, offset_est) = detect_sonde(
_freq,
sdr_fm=self.sdr_fm,
device_idx=self.device_idx,
ppm=self.ppm,
gain=self.gain,
bias=self.bias,
dwell_time=self.detect_dwell_time,
save_detection_audio=self.save_detection_audio)
save_detection_audio=self.save_detection_audio,
)
if detected != None:
# Quantize the detected frequency (with offset) to 1 kHz
_freq = round((_freq + offset_est)/1000.0)*1000.0
_freq = round((_freq + offset_est) / 1000.0) * 1000.0
# Add a detected sonde to the output array
_search_results.append([_freq, detected])
@ -801,8 +977,7 @@ class SondeScanner(object):
return _search_results
def oneshot(self, first_only = False):
def oneshot(self, first_only=False):
""" Perform a once-off scan attempt
Args:
@ -820,12 +995,10 @@ class SondeScanner(object):
else:
# Otherwise, attempt a scan.
self.sonde_scanner_running = True
_result = self.sonde_search(first_only = first_only)
_result = self.sonde_search(first_only=first_only)
self.sonde_scanner_running = False
return _result
def stop(self):
""" Stop the Scan Loop """
self.log_info("Waiting for current scan to finish...")
@ -835,12 +1008,10 @@ class SondeScanner(object):
if self.sonde_scan_thread != None:
self.sonde_scan_thread.join()
def running(self):
""" Check if the scanner is running """
return self.sonde_scanner_running
def add_temporary_block(self, frequency):
""" Add a frequency to the temporary block list.
@ -852,58 +1023,58 @@ class SondeScanner(object):
self.temporary_block_list_lock.acquire()
self.temporary_block_list[frequency] = time.time()
self.temporary_block_list_lock.release()
self.log_info("Adding temporary block for frequency %.3f MHz." % (frequency/1e6))
self.log_info(
"Adding temporary block for frequency %.3f MHz." % (frequency / 1e6)
)
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.debug("Scanner #%s - %s" % (self.device_idx,line))
logging.debug("Scanner #%s - %s" % (self.device_idx, line))
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.info("Scanner #%s - %s" % (self.device_idx,line))
logging.info("Scanner #%s - %s" % (self.device_idx, line))
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.error("Scanner #%s - %s" % (self.device_idx,line))
logging.error("Scanner #%s - %s" % (self.device_idx, line))
def log_warning(self, line):
""" Helper function to log a warning message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.warning("Scanner #%s - %s" % (self.device_idx,line))
logging.warning("Scanner #%s - %s" % (self.device_idx, line))
if __name__ == "__main__":
# Basic test script - run a scan using default parameters.
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
# Callback to handle scan results
def print_result(scan_result):
print("SCAN RESULT: " + str(scan_result))
# Local spurs at my house :-)
blacklist = [401.7,401.32,402.09,402.47,400.17,402.85]
blacklist = [401.7, 401.32, 402.09, 402.47, 400.17, 402.85]
# Instantiate scanner with default parameters.
_scanner = SondeScanner(callback=print_result, blacklist=blacklist)
try:
# Oneshot approach.
_result = _scanner.oneshot(first_only = True)
_result = _scanner.oneshot(first_only=True)
print("Oneshot search result: %s" % str(_result))
# Continuous scanning:
@ -915,4 +1086,3 @@ if __name__ == "__main__":
except KeyboardInterrupt:
_scanner.stop()
print("Exited cleanly.")


@ -10,132 +10,204 @@ import hashlib
from dateutil.parser import parse
def fix_datetime(datetime_str, local_dt_str = None):
'''
def fix_datetime(datetime_str, local_dt_str=None):
"""
Given a HH:MM:SS string from a telemetry sentence, produce a complete timestamp, using the current system time as a guide for the date.
'''
"""
if local_dt_str is None:
_now = datetime.datetime.utcnow()
else:
_now = parse(local_dt_str)
# Are we in the rollover window?
if _now.hour == 23 or _now.hour == 0:
_outside_window = False
else:
_outside_window = True
# Parsing just a HH:MM:SS will return a datetime object with the year, month and day replaced by values in the 'default'
# argument.
_imet_dt = parse(datetime_str, default=_now)
if _outside_window:
# We are outside the day-rollover window, and can safely use the current zulu date.
return _imet_dt
else:
# We are within the window, and need to adjust the day backwards or forwards based on the sonde time.
if _imet_dt.hour == 23 and _now.hour == 0:
# Assume system clock running slightly fast, and subtract a day from the telemetry date.
_imet_dt = _imet_dt - datetime.timedelta(days=1)
elif _imet_dt.hour == 00 and _now.hour == 23:
# System clock running slow. Add a day.
_imet_dt = _imet_dt + datetime.timedelta(days=1)
return _imet_dt
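# Worked example of the window logic above, taken from one of the test rows
# further below: a sonde timestamp of "23:59:58" received when the station
# clock reads 2019-03-02T00:00:03Z straddles midnight (sonde hour 23, local
# hour 0), so the result is re-dated backwards to 2019-03-01T23:59:58Z.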
#
# iMet Radiosonde Functions
#
def imet_unique_id(telemetry, custom=""):
'''
"""
Generate a 'unique' imet radiosonde ID based on the power-on time, frequency, and an optional location code.
This requires the following fields be present in the telemetry dict:
datetime_dt (datetime) (will need to be generated above)
frame (int) - Frame number
freq_float (float) - Frequency in MHz, as a floating point number.
'''
"""
_imet_dt = telemetry['datetime_dt']
_imet_dt = telemetry["datetime_dt"]
# Determine power on time: Current time - number of frames (one frame per second)
_power_on_time = _imet_dt - datetime.timedelta(seconds=telemetry['frame'])
# Determine power on time: Current time - number of frames (one frame per second)
_power_on_time = _imet_dt - datetime.timedelta(seconds=telemetry["frame"])
# Round frequency to the nearest 100 kHz (iMet sondes only have 100 kHz frequency steps)
_freq = round(telemetry['freq_float']*10.0)/10.0
_freq = "%.3f MHz" % _freq
# Round frequency to the nearest 100 kHz (iMet sondes only have 100 kHz frequency steps)
_freq = round(telemetry["freq_float"] * 10.0) / 10.0
_freq = "%.3f MHz" % _freq
# Now we generate a string to hash based on the power-on time, the rounded frequency, and the custom field.
_temp_str = _power_on_time.strftime("%Y-%m-%dT%H:%M:%SZ") + _freq + custom
# Calculate a SHA256 hash of the string generated above.
_hash = hashlib.sha256(_temp_str.encode('ascii')).hexdigest().upper()
# Calculate a SHA256 hash of the string generated above.
_hash = hashlib.sha256(_temp_str.encode("ascii")).hexdigest().upper()
return "IMET-" + _hash[-8:]
return "IMET-" + _hash[-8:]
#
# DFM Sonde Subtypes
#
def decode_dfm_subtype(subtype):
"""
"""
Decode a DFM subtype (0xB through 0xD) into a possible model number.
NOTE: These are best guesses as to the relationship between subtype ID nibble
and actual model number. Graw have said that sonde decoders should not rely on
this nibble for identification.
"""
if '0x' not in subtype:
return "DFM-Unknown"
if subtype == "0x6":
return "DFM06"
elif subtype == '0xA':
return "DFM09"
elif subtype == '0xB':
return "DFM17"
elif subtype == '0xC':
return "DFM09P"
elif subtype == '0xD':
return "DFM17"
else:
# Unknown subtype
return "DFMx" + subtype[-1]
if "0x" not in subtype:
return "DFM-Unknown"
if subtype == "0x6":
return "DFM06"
elif subtype == "0xA":
return "DFM09"
elif subtype == "0xB":
return "DFM17"
elif subtype == "0xC":
return "DFM09P"
elif subtype == "0xD":
return "DFM17"
else:
# Unknown subtype
return "DFMx" + subtype[-1]
if __name__ == "__main__":
# Testing scripts for the above.
test_data = [
{'datetime':'23:59:58', 'frame': 50, 'freq': '402.001 MHz', 'local_dt': "2019-03-01T23:59:58Z"},
{'datetime':'23:59:58', 'frame': 50, 'freq': '401.999 MHz', 'local_dt': "2019-03-01T23:59:57Z"},
{'datetime':'23:59:58', 'frame': 50, 'freq': '402.000 MHz', 'local_dt': "2019-03-02T00:00:03Z"},
{'datetime':'00:00:00', 'frame': 52, 'freq': '402.000 MHz', 'local_dt': "2019-03-01T23:59:57Z"},
{'datetime':'00:00:00', 'frame': 52, 'freq': '402.000 MHz', 'local_dt': "2019-03-02T00:00:03Z"},
{'datetime':'00:00:01', 'frame': 53, 'freq': '402.000 MHz', 'local_dt': "2019-03-01T23:59:57Z"},
{'datetime':'00:00:01', 'frame': 53, 'freq': '402.000 MHz', 'local_dt': "2019-03-02T00:00:03Z"},
{'datetime':'11:59:58', 'frame': 42, 'freq': '402.000 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
{'datetime':'12:00:02', 'frame': 46, 'freq': '402.000 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
#20:12:05Z,IMET-E2596815,3935,33.29227,35.68153,9253.0,-42.8,62.7,iMet,401.998,SATS 10,BATT 5.0
{'datetime':'20:12:05', 'frame': 3935, 'freq': '401.998 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
#20:12:06Z,IMET-E2596815,3937,33.29233,35.68149,9259.0,-42.9,62.1,iMet,401.998,SATS 11,BATT 5.0
{'datetime':'20:12:06', 'frame': 3937, 'freq': '401.998 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
#20:13:03Z,IMET-F46CA05C,4049,33.29655,35.67995,9640.0,-45.9,51.2,iMet,401.999,SATS 10,BATT 5.0
{'datetime':'20:13:03', 'frame': 4049, 'freq': '401.999 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
#20:13:04Z,IMET-F46CA05C,4051,33.29666,35.67995,9646.0,-46.0,51.1,iMet,401.999,SATS 10,BATT 5.0
{'datetime':'20:13:04', 'frame': 4051, 'freq': '401.999 MHz', 'local_dt': "2019-03-01T12:00:03Z"},
]
test_data = [
{
"datetime": "23:59:58",
"frame": 50,
"freq": "402.001 MHz",
"local_dt": "2019-03-01T23:59:58Z",
},
{
"datetime": "23:59:58",
"frame": 50,
"freq": "401.999 MHz",
"local_dt": "2019-03-01T23:59:57Z",
},
{
"datetime": "23:59:58",
"frame": 50,
"freq": "402.000 MHz",
"local_dt": "2019-03-02T00:00:03Z",
},
{
"datetime": "00:00:00",
"frame": 52,
"freq": "402.000 MHz",
"local_dt": "2019-03-01T23:59:57Z",
},
{
"datetime": "00:00:00",
"frame": 52,
"freq": "402.000 MHz",
"local_dt": "2019-03-02T00:00:03Z",
},
{
"datetime": "00:00:01",
"frame": 53,
"freq": "402.000 MHz",
"local_dt": "2019-03-01T23:59:57Z",
},
{
"datetime": "00:00:01",
"frame": 53,
"freq": "402.000 MHz",
"local_dt": "2019-03-02T00:00:03Z",
},
{
"datetime": "11:59:58",
"frame": 42,
"freq": "402.000 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
{
"datetime": "12:00:02",
"frame": 46,
"freq": "402.000 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
# 20:12:05Z,IMET-E2596815,3935,33.29227,35.68153,9253.0,-42.8,62.7,iMet,401.998,SATS 10,BATT 5.0
{
"datetime": "20:12:05",
"frame": 3935,
"freq": "401.998 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
# 20:12:06Z,IMET-E2596815,3937,33.29233,35.68149,9259.0,-42.9,62.1,iMet,401.998,SATS 11,BATT 5.0
{
"datetime": "20:12:06",
"frame": 3937,
"freq": "401.998 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
# 20:13:03Z,IMET-F46CA05C,4049,33.29655,35.67995,9640.0,-45.9,51.2,iMet,401.999,SATS 10,BATT 5.0
{
"datetime": "20:13:03",
"frame": 4049,
"freq": "401.999 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
# 20:13:04Z,IMET-F46CA05C,4051,33.29666,35.67995,9646.0,-46.0,51.1,iMet,401.999,SATS 10,BATT 5.0
{
"datetime": "20:13:04",
"frame": 4051,
"freq": "401.999 MHz",
"local_dt": "2019-03-01T12:00:03Z",
},
]
for _test in test_data:
_test['freq_float'] = float(_test['freq'].split(' ')[0])
_test['datetime_dt'] = fix_datetime(_test['datetime'], local_dt_str = _test['local_dt'])
print("Input Time: %s, Local Time: %s, Output Time: %s" % (_test['datetime'], _test['local_dt'], _test['datetime_dt'].strftime("%Y-%m-%dT%H:%M:%SZ")))
_test['id'] = imet_unique_id(_test)
print("Generated ID: %s" % _test['id'])
print(" ")
for _test in test_data:
_test["freq_float"] = float(_test["freq"].split(" ")[0])
_test["datetime_dt"] = fix_datetime(
_test["datetime"], local_dt_str=_test["local_dt"]
)
print(
"Input Time: %s, Local Time: %s, Output Time: %s"
% (
_test["datetime"],
_test["local_dt"],
_test["datetime_dt"].strftime("%Y-%m-%dT%H:%M:%SZ"),
)
)
_test["id"] = imet_unique_id(_test)
print("Generated ID: %s" % _test["id"])
print(" ")


@ -511,7 +511,7 @@
</div>
<div class="row">
<div class='col-12'>
Auto-Follow Latest Sonde: <input type="checkbox" id="sondeAutoFollow" checked> &nbsp; &nbsp; Hide Map: <input type="checkbox" id="hideMap">
Auto-Follow Latest Sonde: <input type="checkbox" id="sondeAutoFollow" checked> &nbsp; &nbsp; Hide Map: <input type="checkbox" id="hideMap">&nbsp; &nbsp; <a href="./rs.kml">Live KML</a>
<div id="sonde_map" style="height:400px;width:100%"></div>
<br>
</div>


@ -10,12 +10,13 @@ import traceback
import socket
import sys
def udp_rx_loop(hostname='localhost', port=50000):
def udp_rx_loop(hostname="localhost", port=50000):
"""
Listen for incoming UDP packets, and emit them via stdout.
"""
s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
@ -23,7 +24,7 @@ def udp_rx_loop(hostname='localhost', port=50000):
except:
pass
s.bind((hostname, port))
while True:
try:
m = s.recvfrom(1024)
@ -33,7 +34,7 @@ def udp_rx_loop(hostname='localhost', port=50000):
break
except:
traceback.print_exc()
if m != None:
try:
sys.stdout.write(m[0].decode())
@ -44,6 +45,7 @@ def udp_rx_loop(hostname='localhost', port=50000):
s.close()
if __name__ == "__main__":
#
# Basic UDP listener, used to feed JSON data into auto_rx for debug & testing purposes.
@ -54,5 +56,5 @@ if __name__ == "__main__":
_port = int(sys.argv[1])
else:
_port = 50000
udp_rx_loop(port=_port)
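# A matching test sender sketch (port and JSON payload assumed; any
# newline-terminated JSON emitted by the decoders would work here):
import json
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.sendto((json.dumps({"type": "TEST"}) + "\n").encode(), ("localhost", 50000))
s.close()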


@ -21,6 +21,7 @@ from dateutil.parser import parse
from datetime import datetime, timedelta
from math import radians, degrees, sin, cos, atan2, sqrt, pi
from . import __version__ as auto_rx_version
try:
# Python 2
from Queue import Queue
@ -30,7 +31,19 @@ except ImportError:
# List of binaries we check for on startup
REQUIRED_RS_UTILS = ['dft_detect', 'dfm09mod', 'm10mod', 'imet1rs_dft', 'rs41mod', 'rs92mod', 'fsk_demod', 'mk2a_lms1680', 'lms6Xmod', 'meisei100mod']
REQUIRED_RS_UTILS = [
"dft_detect",
"dfm09mod",
"m10mod",
"imet1rs_dft",
"rs41mod",
"rs92mod",
"fsk_demod",
"mk2a_lms1680",
"lms6Xmod",
"meisei100mod",
]
def check_rs_utils():
""" Check the required RS decoder binaries exist
@ -45,10 +58,12 @@ def check_rs_utils():
AUTORX_VERSION_URL = "https://raw.githubusercontent.com/projecthorus/radiosonde_auto_rx/master/auto_rx/autorx/__init__.py"
def check_autorx_version():
""" Grab the latest __init__ file from Github and compare the version with our current version. """
try:
_r = requests.get(AUTORX_VERSION_URL,timeout=5)
_r = requests.get(AUTORX_VERSION_URL, timeout=5)
except Exception as e:
logging.error("Version - Error determining latest master version - %s" % str(e))
return
@ -56,18 +71,18 @@ def check_autorx_version():
_version = "Unknown"
try:
for _line in _r.text.split('\n'):
for _line in _r.text.split("\n"):
if _line.startswith("__version__"):
_version = _line.split('=')[1]
_version = _version.replace("\"", "").strip()
_version = _line.split("=")[1]
_version = _version.replace('"', "").strip()
break
except Exception as e:
logging.error("Version - Error determining latest master version.")
logging.info("Version - Local Version: %s Current Master Version: %s" % (auto_rx_version, _version))
logging.info(
"Version - Local Version: %s Current Master Version: %s"
% (auto_rx_version, _version)
)
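# Sketch of the version-line parsing above, on an assumed input line:
_line = '__version__ = "1.3.3-beta7"'
print(_line.split("=")[1].replace('"', "").strip())  # -> 1.3.3-beta7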
class AsynchronousFileReader(threading.Thread):
@ -114,7 +129,6 @@ class AsynchronousFileReader(threading.Thread):
"""
self.running = False
def readlines(self):
"""
Get currently available lines.
@ -128,8 +142,17 @@ class AsynchronousFileReader(threading.Thread):
#
def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
kpsh=False, valley=False, show=False, ax=None):
def detect_peaks(
x,
mph=None,
mpd=1,
threshold=0,
edge="rising",
kpsh=False,
valley=False,
show=False,
ax=None,
):
"""Detect peaks in data based on their amplitude and other features.
@ -207,7 +230,7 @@ def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
>>> detect_peaks(x, threshold = 2, show=True)
"""
x = np.atleast_1d(x).astype('float64')
x = np.atleast_1d(x).astype("float64")
if x.size < 3:
return np.array([], dtype=int)
if valley:
@ -223,26 +246,30 @@ def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
if not edge:
ine = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) > 0))[0]
else:
if edge.lower() in ['rising', 'both']:
if edge.lower() in ["rising", "both"]:
ire = np.where((np.hstack((dx, 0)) <= 0) & (np.hstack((0, dx)) > 0))[0]
if edge.lower() in ['falling', 'both']:
if edge.lower() in ["falling", "both"]:
ife = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0]
ind = np.unique(np.hstack((ine, ire, ife)))
# handle NaN's
if ind.size and indnan.size:
# NaN's and values close to NaN's cannot be peaks
ind = ind[np.in1d(ind, np.unique(np.hstack((indnan, indnan-1, indnan+1))), invert=True)]
ind = ind[
np.in1d(
ind, np.unique(np.hstack((indnan, indnan - 1, indnan + 1))), invert=True
)
]
# first and last values of x cannot be peaks
if ind.size and ind[0] == 0:
ind = ind[1:]
if ind.size and ind[-1] == x.size-1:
if ind.size and ind[-1] == x.size - 1:
ind = ind[:-1]
# remove peaks < minimum peak height
if ind.size and mph is not None:
ind = ind[x[ind] >= mph]
# remove peaks - neighbors < threshold
if ind.size and threshold > 0:
dx = np.min(np.vstack([x[ind]-x[ind-1], x[ind]-x[ind+1]]), axis=0)
dx = np.min(np.vstack([x[ind] - x[ind - 1], x[ind] - x[ind + 1]]), axis=0)
ind = np.delete(ind, np.where(dx < threshold)[0])
# detect small peaks closer than minimum peak distance
if ind.size and mpd > 1:
@ -251,8 +278,9 @@ def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
for i in range(ind.size):
if not idel[i]:
# keep peaks with the same height if kpsh is True
idel = idel | (ind >= ind[i] - mpd) & (ind <= ind[i] + mpd) \
& (x[ind[i]] > x[ind] if kpsh else True)
idel = idel | (ind >= ind[i] - mpd) & (ind <= ind[i] + mpd) & (
x[ind[i]] > x[ind] if kpsh else True
)
idel[i] = 0 # Keep current peak
# remove the small peaks and sort back the indices by their occurrence
ind = np.sort(ind[~idel])
@ -272,93 +300,100 @@ def peak_plot(x, mph, mpd, threshold, edge, valley, ax, ind):
try:
import matplotlib.pyplot as plt
except ImportError:
print('matplotlib is not available.')
print("matplotlib is not available.")
else:
if ax is None:
_, ax = plt.subplots(1, 1, figsize=(8, 4))
ax.plot(x, 'b', lw=1)
ax.plot(x, "b", lw=1)
if ind.size:
label = 'valley' if valley else 'peak'
label = label + 's' if ind.size > 1 else label
ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
label='%d %s' % (ind.size, label))
ax.legend(loc='best', framealpha=.5, numpoints=1)
ax.set_xlim(-.02*x.size, x.size*1.02-1)
label = "valley" if valley else "peak"
label = label + "s" if ind.size > 1 else label
ax.plot(
ind,
x[ind],
"+",
mfc=None,
mec="r",
mew=2,
ms=8,
label="%d %s" % (ind.size, label),
)
ax.legend(loc="best", framealpha=0.5, numpoints=1)
ax.set_xlim(-0.02 * x.size, x.size * 1.02 - 1)
ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
yrange = ymax - ymin if ymax > ymin else 1
ax.set_ylim(ymin - 0.1*yrange, ymax + 0.1*yrange)
ax.set_xlabel('Data #', fontsize=14)
ax.set_ylabel('Amplitude', fontsize=14)
mode = 'Valley detection' if valley else 'Peak detection'
ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
% (mode, str(mph), mpd, str(threshold), edge))
ax.set_ylim(ymin - 0.1 * yrange, ymax + 0.1 * yrange)
ax.set_xlabel("Data #", fontsize=14)
ax.set_ylabel("Amplitude", fontsize=14)
mode = "Valley detection" if valley else "Peak detection"
ax.set_title(
"%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
% (mode, str(mph), mpd, str(threshold), edge)
)
# plt.grid()
plt.show()
#
# RTLSDR Utility Functions
#
# Regexes to help parse lsusb's output
_INDENTATION_RE = re.compile(r'^( *)')
_LSUSB_BUS_DEVICE_RE = re.compile(r'^Bus (\d{3}) Device (\d{3}):')
_LSUSB_ENTRY_RE = re.compile(r'^ *([^ ]+) +([^ ]+) *([^ ].*)?$')
_LSUSB_GROUP_RE = re.compile(r'^ *([^ ]+.*):$')
_INDENTATION_RE = re.compile(r"^( *)")
_LSUSB_BUS_DEVICE_RE = re.compile(r"^Bus (\d{3}) Device (\d{3}):")
_LSUSB_ENTRY_RE = re.compile(r"^ *([^ ]+) +([^ ]+) *([^ ].*)?$")
_LSUSB_GROUP_RE = re.compile(r"^ *([^ ]+.*):$")
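# Quick illustration of the bus/device regex above, against a typical
# (assumed) line of `lsusb -v` output:
import re
_m = re.match(r"^Bus (\d{3}) Device (\d{3}):", "Bus 001 Device 004: ID 0bda:2838")
print(_m.group(1), _m.group(2))  # -> 001 004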
# USB Reset ioctl argument
_USBDEVFS_RESET = ord('U') << 8 | 20
_USBDEVFS_RESET = ord("U") << 8 | 20
# List of known RTLSDR-Compatible devices, taken from
# https://github.com/steve-m/librtlsdr/blob/master/src/librtlsdr.c#L313
KNOWN_RTLSDR_DEVICES = [
[ '0x0bda', '0x2832', "Generic RTL2832U" ],
[ '0x0bda', '0x2838', "Generic RTL2832U OEM" ],
[ '0x0413', '0x6680', "DigitalNow Quad DVB-T PCI-E card" ],
[ '0x0413', '0x6f0f', "Leadtek WinFast DTV Dongle mini D" ],
[ '0x0458', '0x707f', "Genius TVGo DVB-T03 USB dongle (Ver. B)" ],
[ '0x0ccd', '0x00a9', "Terratec Cinergy T Stick Black (rev 1)" ],
[ '0x0ccd', '0x00b3', "Terratec NOXON DAB/DAB+ USB dongle (rev 1)" ],
[ '0x0ccd', '0x00b4', "Terratec Deutschlandradio DAB Stick" ],
[ '0x0ccd', '0x00b5', "Terratec NOXON DAB Stick - Radio Energy" ],
[ '0x0ccd', '0x00b7', "Terratec Media Broadcast DAB Stick" ],
[ '0x0ccd', '0x00b8', "Terratec BR DAB Stick" ],
[ '0x0ccd', '0x00b9', "Terratec WDR DAB Stick" ],
[ '0x0ccd', '0x00c0', "Terratec MuellerVerlag DAB Stick" ],
[ '0x0ccd', '0x00c6', "Terratec Fraunhofer DAB Stick" ],
[ '0x0ccd', '0x00d3', "Terratec Cinergy T Stick RC (Rev.3)" ],
[ '0x0ccd', '0x00d7', "Terratec T Stick PLUS" ],
[ '0x0ccd', '0x00e0', "Terratec NOXON DAB/DAB+ USB dongle (rev 2)" ],
[ '0x1554', '0x5020', "PixelView PV-DT235U(RN)" ],
[ '0x15f4', '0x0131', "Astrometa DVB-T/DVB-T2" ],
[ '0x15f4', '0x0133', "HanfTek DAB+FM+DVB-T" ],
[ '0x185b', '0x0620', "Compro Videomate U620F"],
[ '0x185b', '0x0650', "Compro Videomate U650F"],
[ '0x185b', '0x0680', "Compro Videomate U680F"],
[ '0x1b80', '0xd393', "GIGABYTE GT-U7300" ],
[ '0x1b80', '0xd394', "DIKOM USB-DVBT HD" ],
[ '0x1b80', '0xd395', "Peak 102569AGPK" ],
[ '0x1b80', '0xd397', "KWorld KW-UB450-T USB DVB-T Pico TV" ],
[ '0x1b80', '0xd398', "Zaapa ZT-MINDVBZP" ],
[ '0x1b80', '0xd39d', "SVEON STV20 DVB-T USB & FM" ],
[ '0x1b80', '0xd3a4', "Twintech UT-40" ],
[ '0x1b80', '0xd3a8', "ASUS U3100MINI_PLUS_V2" ],
[ '0x1b80', '0xd3af', "SVEON STV27 DVB-T USB & FM" ],
[ '0x1b80', '0xd3b0', "SVEON STV21 DVB-T USB & FM" ],
[ '0x1d19', '0x1101', "Dexatek DK DVB-T Dongle (Logilink VG0002A)" ],
[ '0x1d19', '0x1102', "Dexatek DK DVB-T Dongle (MSI DigiVox mini II V3.0)" ],
[ '0x1d19', '0x1103', "Dexatek Technology Ltd. DK 5217 DVB-T Dongle" ],
[ '0x1d19', '0x1104', "MSI DigiVox Micro HD" ],
[ '0x1f4d', '0xa803', "Sweex DVB-T USB" ],
[ '0x1f4d', '0xb803', "GTek T803" ],
[ '0x1f4d', '0xc803', "Lifeview LV5TDeluxe" ],
[ '0x1f4d', '0xd286', "MyGica TD312" ],
[ '0x1f4d', '0xd803', "PROlectrix DV107669" ],
]
["0x0bda", "0x2832", "Generic RTL2832U"],
["0x0bda", "0x2838", "Generic RTL2832U OEM"],
["0x0413", "0x6680", "DigitalNow Quad DVB-T PCI-E card"],
["0x0413", "0x6f0f", "Leadtek WinFast DTV Dongle mini D"],
["0x0458", "0x707f", "Genius TVGo DVB-T03 USB dongle (Ver. B)"],
["0x0ccd", "0x00a9", "Terratec Cinergy T Stick Black (rev 1)"],
["0x0ccd", "0x00b3", "Terratec NOXON DAB/DAB+ USB dongle (rev 1)"],
["0x0ccd", "0x00b4", "Terratec Deutschlandradio DAB Stick"],
["0x0ccd", "0x00b5", "Terratec NOXON DAB Stick - Radio Energy"],
["0x0ccd", "0x00b7", "Terratec Media Broadcast DAB Stick"],
["0x0ccd", "0x00b8", "Terratec BR DAB Stick"],
["0x0ccd", "0x00b9", "Terratec WDR DAB Stick"],
["0x0ccd", "0x00c0", "Terratec MuellerVerlag DAB Stick"],
["0x0ccd", "0x00c6", "Terratec Fraunhofer DAB Stick"],
["0x0ccd", "0x00d3", "Terratec Cinergy T Stick RC (Rev.3)"],
["0x0ccd", "0x00d7", "Terratec T Stick PLUS"],
["0x0ccd", "0x00e0", "Terratec NOXON DAB/DAB+ USB dongle (rev 2)"],
["0x1554", "0x5020", "PixelView PV-DT235U(RN)"],
["0x15f4", "0x0131", "Astrometa DVB-T/DVB-T2"],
["0x15f4", "0x0133", "HanfTek DAB+FM+DVB-T"],
["0x185b", "0x0620", "Compro Videomate U620F"],
["0x185b", "0x0650", "Compro Videomate U650F"],
["0x185b", "0x0680", "Compro Videomate U680F"],
["0x1b80", "0xd393", "GIGABYTE GT-U7300"],
["0x1b80", "0xd394", "DIKOM USB-DVBT HD"],
["0x1b80", "0xd395", "Peak 102569AGPK"],
["0x1b80", "0xd397", "KWorld KW-UB450-T USB DVB-T Pico TV"],
["0x1b80", "0xd398", "Zaapa ZT-MINDVBZP"],
["0x1b80", "0xd39d", "SVEON STV20 DVB-T USB & FM"],
["0x1b80", "0xd3a4", "Twintech UT-40"],
["0x1b80", "0xd3a8", "ASUS U3100MINI_PLUS_V2"],
["0x1b80", "0xd3af", "SVEON STV27 DVB-T USB & FM"],
["0x1b80", "0xd3b0", "SVEON STV21 DVB-T USB & FM"],
["0x1d19", "0x1101", "Dexatek DK DVB-T Dongle (Logilink VG0002A)"],
["0x1d19", "0x1102", "Dexatek DK DVB-T Dongle (MSI DigiVox mini II V3.0)"],
["0x1d19", "0x1103", "Dexatek Technology Ltd. DK 5217 DVB-T Dongle"],
["0x1d19", "0x1104", "MSI DigiVox Micro HD"],
["0x1f4d", "0xa803", "Sweex DVB-T USB"],
["0x1f4d", "0xb803", "GTek T803"],
["0x1f4d", "0xc803", "Lifeview LV5TDeluxe"],
["0x1f4d", "0xd286", "MyGica TD312"],
["0x1f4d", "0xd803", "PROlectrix DV107669"],
]
def lsusb():
@ -368,11 +403,11 @@ def lsusb():
(list): List of dictionaries containing the device information for each USB device.
"""
try:
FNULL = open(os.devnull, 'w')
lsusb_raw_output = subprocess.check_output(['lsusb', '-v'], stderr=FNULL)
FNULL = open(os.devnull, "w")
lsusb_raw_output = subprocess.check_output(["lsusb", "-v"], stderr=FNULL)
FNULL.close()
# Convert from bytes.
lsusb_raw_output = lsusb_raw_output.decode('utf8')
lsusb_raw_output = lsusb_raw_output.decode("utf8")
except Exception as e:
logging.error("lsusb parse error - %s" % str(e))
return
@ -390,10 +425,7 @@ def lsusb():
if not device:
m = _LSUSB_BUS_DEVICE_RE.match(line)
if m:
device = {
'bus': m.group(1),
'device': m.group(2)
}
device = {"bus": m.group(1), "device": m.group(2)}
depth_stack = [device]
continue
@ -421,8 +453,8 @@ def lsusb():
m = _LSUSB_ENTRY_RE.match(line)
if m:
new_entry = {
'_value': m.group(2),
'_desc': m.group(3),
"_value": m.group(2),
"_desc": m.group(3),
}
cur[m.group(1)] = new_entry
depth_stack.append(new_entry)
@ -431,7 +463,7 @@ def lsusb():
logging.debug('lsusb parsing error: unrecognized line: "%s"', line)
if device:
devices.append(device)
devices.append(device)
return devices
@ -442,13 +474,13 @@ def is_not_linux():
where lsusb isn't going to work.
"""
# Basic check for non-Linux platforms (e.g. Darwin or Windows)
if platform.system() != 'Linux':
if platform.system() != "Linux":
return True
# Second check for the existence of '-Microsoft' in the uname release field.
# This is a good check that we are running in WSL.
# Note the use of indexing instead of the named field, for Python 2 & 3 compatibility.
if 'Microsoft' in platform.uname()[2]:
if "Microsoft" in platform.uname()[2]:
return True
# Else, we're probably in native Linux!
@ -457,9 +489,9 @@ def is_not_linux():
def reset_usb(bus, device):
"""Reset the USB device with the given bus and device."""
usb_file_path = '/dev/bus/usb/%03d/%03d' % (bus, device)
with open(usb_file_path, 'w') as usb_file:
#logging.debug('fcntl.ioctl(%s, %d)', usb_file_path, _USBDEVFS_RESET)
usb_file_path = "/dev/bus/usb/%03d/%03d" % (bus, device)
with open(usb_file_path, "w") as usb_file:
# logging.debug('fcntl.ioctl(%s, %d)', usb_file_path, _USBDEVFS_RESET)
try:
fcntl.ioctl(usb_file, _USBDEVFS_RESET)
@ -467,7 +499,7 @@ def reset_usb(bus, device):
logging.error("RTLSDR - USB Reset Failed.")
def is_rtlsdr(vid,pid):
def is_rtlsdr(vid, pid):
""" Check if a device with given VID/PID is a known RTLSDR """
for _dev in KNOWN_RTLSDR_DEVICES:
_vid = _dev[0]
@ -492,20 +524,23 @@ def reset_rtlsdr_by_serial(serial):
for device in lsusb_info:
try:
device_serial = device['Device Descriptor']['iSerial']['_desc']
device_product = device['Device Descriptor']['iProduct']['_desc']
device_pid = device['Device Descriptor']['idProduct']['_value']
device_vid = device['Device Descriptor']['idVendor']['_value']
device_serial = device["Device Descriptor"]["iSerial"]["_desc"]
device_product = device["Device Descriptor"]["iProduct"]["_desc"]
device_pid = device["Device Descriptor"]["idProduct"]["_value"]
device_vid = device["Device Descriptor"]["idVendor"]["_value"]
except:
# If we hit an exception, the device likely doesn't have one of the required fields.
continue
if (device_serial == serial) and is_rtlsdr(device_vid, device_pid) :
bus_num = int(device['bus'])
device_num = int(device['device'])
if (device_serial == serial) and is_rtlsdr(device_vid, device_pid):
bus_num = int(device["bus"])
device_num = int(device["device"])
if bus_num and device_num:
logging.info("RTLSDR - Attempting to reset: /dev/bus/usb/%03d/%03d" % (bus_num, device_num))
logging.info(
"RTLSDR - Attempting to reset: /dev/bus/usb/%03d/%03d"
% (bus_num, device_num)
)
reset_usb(bus_num, device_num)
else:
logging.error("RTLSDR - Could not find RTLSDR with serial %s!" % serial)
@ -526,10 +561,10 @@ def find_rtlsdr(serial=None):
for device in lsusb_info:
try:
device_serial = device['Device Descriptor']['iSerial']['_desc']
device_product = device['Device Descriptor']['iProduct']['_desc']
device_pid = device['Device Descriptor']['idProduct']['_value']
device_vid = device['Device Descriptor']['idVendor']['_value']
device_serial = device["Device Descriptor"]["iSerial"]["_desc"]
device_product = device["Device Descriptor"]["iProduct"]["_desc"]
device_pid = device["Device Descriptor"]["idProduct"]["_value"]
device_vid = device["Device Descriptor"]["idVendor"]["_value"]
except:
# If we hit an exception, the device likely doesn't have one of the required fields.
continue
@ -539,9 +574,9 @@ def find_rtlsdr(serial=None):
if serial == None:
return True
else:
if (device_serial == serial):
bus_num = int(device['bus'])
device_num = int(device['device'])
if device_serial == serial:
bus_num = int(device["bus"])
device_num = int(device["device"])
if bus_num and device_num:
# We have found an RTLSDR with this serial number!
@ -566,26 +601,28 @@ def reset_all_rtlsdrs():
for device in lsusb_info:
try:
device_product = device['Device Descriptor']['iProduct']['_desc']
device_pid = device['Device Descriptor']['idProduct']['_value']
device_vid = device['Device Descriptor']['idVendor']['_value']
device_product = device["Device Descriptor"]["iProduct"]["_desc"]
device_pid = device["Device Descriptor"]["idProduct"]["_value"]
device_vid = device["Device Descriptor"]["idVendor"]["_value"]
except:
# If we hit an exception, the device likely doesn't have one of the required fields.
continue
if is_rtlsdr(device_vid, device_pid) :
bus_num = int(device['bus'])
device_num = int(device['device'])
if is_rtlsdr(device_vid, device_pid):
bus_num = int(device["bus"])
device_num = int(device["device"])
logging.info("RTLSDR - Attempting to reset: Bus: %d Device: %d" % (bus_num, device_num))
logging.info(
"RTLSDR - Attempting to reset: Bus: %d Device: %d"
% (bus_num, device_num)
)
reset_usb(bus_num, device_num)
if device_num is None:
logging.error("RTLSDR - Could not find any RTLSDR devices to reset!")
def rtlsdr_test(device_idx='0', rtl_sdr_path="rtl_sdr", retries = 5):
def rtlsdr_test(device_idx="0", rtl_sdr_path="rtl_sdr", retries=5):
""" Test that a RTLSDR with supplied device ID is accessible.
This function attempts to read a small set of samples from an RTLSDR using rtl_sdr.
@ -602,24 +639,28 @@ def rtlsdr_test(device_idx='0', rtl_sdr_path="rtl_sdr", retries = 5):
# Immediately return true for any SDR with a device ID that starts with TCP,
# as this indicates this is not actually an RTLSDR, but a client connecting to some other
# SDR server.
if device_idx.startswith('TCP'):
if device_idx.startswith("TCP"):
logging.debug("RTLSDR - TCP Device, skipping RTLSDR test step.")
return True
_rtl_cmd = "timeout 5 %s -d %s -n 200000 - > /dev/null" % (rtl_sdr_path, str(device_idx))
_rtl_cmd = "timeout 5 %s -d %s -n 200000 - > /dev/null" % (
rtl_sdr_path,
str(device_idx),
)
# First, check if the RTLSDR with a provided serial number is present.
if device_idx == '0':
if device_idx == "0":
# Check for the presence of any RTLSDRs.
_rtl_exists = find_rtlsdr()
else:
# Otherwise, look for a particular RTLSDR
_rtl_exists = find_rtlsdr(device_idx)
if not _rtl_exists:
logging.error("RTLSDR - RTLSDR with serial #%s is not present!" % str(device_idx))
logging.error(
"RTLSDR - RTLSDR with serial #%s is not present!" % str(device_idx)
)
return False
# So now we know the rtlsdr we are attempting to test does exist.
@ -629,12 +670,12 @@ def rtlsdr_test(device_idx='0', rtl_sdr_path="rtl_sdr", retries = 5):
while _rtlsdr_retries > 0:
try:
FNULL = open(os.devnull, 'w') # Inhibit stderr output
FNULL = open(os.devnull, "w") # Inhibit stderr output
_ret_code = subprocess.check_call(_rtl_cmd, shell=True, stderr=FNULL)
FNULL.close()
except subprocess.CalledProcessError:
# This exception means the subprocess has returned an error code of one.
# This indicates either the RTLSDR doesn't exist, or is otherwise inaccessible.
pass
else:
# rtl-sdr returned OK. We can return True now.
@ -643,7 +684,7 @@ def rtlsdr_test(device_idx='0', rtl_sdr_path="rtl_sdr", retries = 5):
# If we get here, it means we failed to read any samples from the RTLSDR.
# So, we attempt to reset it.
if device_idx == '0':
if device_idx == "0":
reset_all_rtlsdrs()
else:
reset_rtlsdr_by_serial(device_idx)
@ -653,7 +694,10 @@ def rtlsdr_test(device_idx='0', rtl_sdr_path="rtl_sdr", retries = 5):
time.sleep(2)
# If we run out of retries, clearly the RTLSDR isn't working.
logging.error("RTLSDR - RTLSDR with serial #%s was not recovered after %d reset attempts." % (str(device_idx),retries))
logging.error(
"RTLSDR - RTLSDR with serial #%s was not recovered after %d reset attempts."
% (str(device_idx), retries)
)
return False
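# Typical use of the test above (device index assumed; "0" selects the
# first RTLSDR found on the system):
if not rtlsdr_test("0"):
    print("RTLSDR not available - check connections and drivers.")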
@ -726,7 +770,8 @@ def position_info(listener, balloon):
bearing += 2 * pi
return {
"listener": listener, "balloon": balloon,
"listener": listener,
"balloon": balloon,
"listener_radians": (lat1, lon1, alt1),
"balloon_radians": (lat2, lon2, alt2),
"angle_at_centre": degrees(angle_at_centre),
@ -736,7 +781,7 @@ def position_info(listener, balloon):
"great_circle_distance": great_circle_distance,
"straight_distance": distance,
"elevation": degrees(elevation),
"elevation_radians": elevation
"elevation_radians": elevation,
}
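# A minimal usage sketch (coordinates assumed): both arguments are taken to be
# (latitude, longitude, altitude) tuples in degrees and metres, matching the
# listener/balloon fields returned above.
_info = position_info((-34.9, 138.6, 50.0), (-34.5, 139.0, 12000.0))
print(_info["great_circle_distance"], _info["elevation"])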
@ -752,15 +797,15 @@ def peak_decimation(freq, power, factor):
tuple: (freq, power)
"""
_out_len = len(freq)//factor
_out_len = len(freq) // factor
_freq_out =[]
_freq_out = []
_power_out = []
try:
for i in range(_out_len):
_f_slice = freq[i*factor : i*factor + factor]
_p_slice = power[i*factor : i*factor + factor]
_f_slice = freq[i * factor : i * factor + factor]
_p_slice = power[i * factor : i * factor + factor]
_freq_out.append(_f_slice[np.argmax(_p_slice)])
_power_out.append(_p_slice.max())
@ -772,5 +817,8 @@ def peak_decimation(freq, power, factor):
if __name__ == "__main__":
import sys
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
check_autorx_version()


@ -13,6 +13,7 @@ import random
import requests
import time
import traceback
import sys
import autorx
import autorx.config
import autorx.scan
@ -21,6 +22,15 @@ from threading import Thread
import flask
from flask import request, abort
from flask_socketio import SocketIO
try:
from simplekml import Kml, AltitudeMode
except ImportError:
print(
"Could not import simplekml! Try running: sudo pip3 install -r requirements.txt"
)
sys.exit(1)
try:
# Python 2
from Queue import Queue
@ -29,10 +39,14 @@ except ImportError:
from queue import Queue
# Inhibit Flask warning message about running a development server... (we know!)
cli = sys.modules["flask.cli"]
cli.show_server_banner = lambda *x: None
# Instantiate our Flask app.
app = flask.Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
app.config['TEMPLATES_AUTO_RELOAD'] = True
app.config["SECRET_KEY"] = "secret!"
app.config["TEMPLATES_AUTO_RELOAD"] = True
app.jinja_env.auto_reload = True
# This thread will hold the currently running flask application thread.
flask_app_thread = None
@ -40,7 +54,7 @@ flask_app_thread = None
flask_shutdown_key = "temp"
# SocketIO instance
socketio = SocketIO(app, async_mode='threading')
socketio = SocketIO(app, async_mode="threading")
# Global store of telemetry data, which we will add data to and manage.
# Under each key (which will be the sonde ID), we will have a dictionary containing:
@ -56,16 +70,18 @@ flask_telemetry_store = {}
#
def flask_emit_event(event_name="none", data={}):
""" Emit a socketio event to any clients. """
socketio.emit(event_name, data, namespace='/update_status')
socketio.emit(event_name, data, namespace="/update_status")
#
# Flask Routes
#
@app.route("/")
def flask_index():
""" Render main index page """
return flask.render_template('index.html')
return flask.render_template("index.html")
@app.route("/get_version")
@ -78,22 +94,24 @@ def flask_get_version():
def flask_get_task_list():
""" Return the current list of active SDRs, and their active task names """
# Read in the task list, index by SDR ID.
_task_list = {}
for _task in autorx.task_list.keys():
_task_list[str(autorx.task_list[_task]['device_idx'])] = _task
_task_list[str(autorx.task_list[_task]["device_idx"])] = _task
# Now, for each configured SDR, determine what task it is currently performing
_sdr_list = {}
for _sdr in autorx.sdr_list.keys():
_sdr_list[str(_sdr)] = 'Not Tasked'
_sdr_list[str(_sdr)] = "Not Tasked"
if str(_sdr) in _task_list:
if _task_list[str(_sdr)] == 'SCAN':
_sdr_list[str(_sdr)] = 'Scanning'
if _task_list[str(_sdr)] == "SCAN":
_sdr_list[str(_sdr)] = "Scanning"
else:
try:
_sdr_list[str(_sdr)] = "Decoding (%.3f MHz)" % (_task_list[str(_sdr)]/1e6)
_sdr_list[str(_sdr)] = "Decoding (%.3f MHz)" % (
_task_list[str(_sdr)] / 1e6
)
except:
_sdr_list[str(_sdr)] = "Decoding (?? MHz)"
@ -101,6 +119,78 @@ def flask_get_task_list():
return json.dumps(_sdr_list)
@app.route("/rs.kml")
def flask_get_kml():
""" Return KML with autorefresh """
_config = autorx.config.global_config
kml = Kml()
netlink = kml.newnetworklink(name="Radiosonde AutoRX")
netlink.open = 1
netlink.link.href = flask.request.host_url + "rs_feed.kml"
try:
netlink.link.refreshinterval = _config["kml_refresh_rate"]
except KeyError:
netlink.link.refreshinterval = 10
netlink.link.refreshmode = "onInterval"
return kml.kml(), 200, {"content-type": "application/vnd.google-earth.kml+xml"}
@app.route("/rs_feed.kml")
def flask_get_kml_feed():
""" Return KML with RS telemetry """
kml = Kml()
for rs_id in flask_telemetry_store:
try:
coordinates = []
for tp in flask_telemetry_store[rs_id]["track"].track_history:
coordinates.append((tp[2], tp[1], tp[3]))
rs_data = """\
{type}/{subtype}
Frequency: {freq}
Altitude: {alt}m
Heading: {heading}
Velocity: {vel_h}
Vertical speed: {vel_v}
Temperature: {temp}C
Humidity: {humidity}%
Pressure: {pressure}hPa
"""
if flask_telemetry_store[rs_id]["latest_telem"]["vel_v"] > -5:
icon = flask.request.host_url + "static/img/balloon-green.png"
else:
icon = flask.request.host_url + "static/img/parachute-green.png"
pnt = kml.newpoint(
name=rs_id,
altitudemode=AltitudeMode.absolute,
description=rs_data.format(
**flask_telemetry_store[rs_id]["latest_telem"]
),
)
pnt.iconstyle.icon.href = icon
pnt.coords = [
(
flask_telemetry_store[rs_id]["latest_telem"]["lon"],
flask_telemetry_store[rs_id]["latest_telem"]["lat"],
flask_telemetry_store[rs_id]["latest_telem"]["alt"],
)
]
linestring = kml.newlinestring(name=rs_id)
linestring.coords = coordinates
linestring.altitudemode = AltitudeMode.absolute
linestring.extrude = 1
linestring.stylemap.normalstyle.linestyle.color = "ff03bafc"
linestring.stylemap.highlightstyle.linestyle.color = "ff03bafc"
linestring.stylemap.normalstyle.polystyle.color = "AA03bafc"
linestring.stylemap.highlightstyle.polystyle.color = "CC03bafc"
except Exception as e:
print(e)
return kml.kml(), 200, {"content-type": "application/vnd.google-earth.kml+xml"}
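# A quick way to eyeball the two new endpoints once auto_rx is running
# (host and port assumed to be the defaults):
#   curl http://localhost:5000/rs.kml       # NetworkLink wrapper, auto-refreshes
#   curl http://localhost:5000/rs_feed.kml  # live placemarks and track lines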
@app.route("/get_config")
def flask_get_config():
""" Return a copy of the current auto_rx configuration """
@ -123,7 +213,7 @@ def flask_get_telemetry_archive():
# Make a copy of the store, and remove the non-serialisable GenericTrack object
_temp_store = copy.deepcopy(flask_telemetry_store)
for _element in _temp_store:
_temp_store[_element].pop('track')
_temp_store[_element].pop("track")
return json.dumps(_temp_store)
@ -134,7 +224,7 @@ def shutdown_flask(shutdown_key):
global flask_shutdown_key
# Only shutdown if the supplied key matches our shutdown key
if shutdown_key == flask_shutdown_key:
flask.request.environ.get('werkzeug.server.shutdown')()
flask.request.environ.get("werkzeug.server.shutdown")()
return ""
@ -143,16 +233,17 @@ def shutdown_flask(shutdown_key):
# Control Endpoints.
#
@app.route('/start_decoder', methods=['POST'])
@app.route("/start_decoder", methods=["POST"])
def flask_start_decoder():
""" Inject a scan result, which will cause a decoder to be started if there
are enough resources (SDRs) to do so.
Example:
curl -d "type=DFM&freq=403240000" -X POST http://localhost:5000/start_decoder
"""
if request.method == 'POST' and autorx.config.global_config['web_control']:
_type = str(request.form['type'])
_freq = float(request.form['freq'])
if request.method == "POST" and autorx.config.global_config["web_control"]:
_type = str(request.form["type"])
_freq = float(request.form["freq"])
logging.info("Web - Got decoder start request: %s, %f" % (_type, _freq))
@ -163,19 +254,19 @@ def flask_start_decoder():
abort(403)
@app.route('/stop_decoder', methods=['POST'])
@app.route("/stop_decoder", methods=["POST"])
def flask_stop_decoder():
""" Request that a decoder process be halted.
Example:
curl -d "freq=403250000" -X POST http://localhost:5000/stop_decoder
"""
if request.method == 'POST' and autorx.config.global_config['web_control']:
_freq = float(request.form['freq'])
if request.method == "POST" and autorx.config.global_config["web_control"]:
_freq = float(request.form["freq"])
logging.info("Web - Got decoder stop request: %f" % (_freq))
if _freq in autorx.task_list:
autorx.task_list[_freq]['task'].stop()
autorx.task_list[_freq]["task"].stop()
return "OK"
else:
# If we aren't running a decoder, 404.
@ -184,36 +275,36 @@ def flask_stop_decoder():
abort(403)
@app.route('/disable_scanner', methods=['POST'])
@app.route("/disable_scanner", methods=["POST"])
def flask_disable_scanner():
""" Disable and Halt a Scanner, if one is running. """
if request.method == 'POST' and autorx.config.global_config['web_control']:
if 'SCAN' not in autorx.task_list:
if request.method == "POST" and autorx.config.global_config["web_control"]:
if "SCAN" not in autorx.task_list:
# No scanner thread running!
abort(404)
else:
logging.info("Web - Got scanner stop request.")
# Set the scanner inhibit flag so it doesn't automatically start again.
autorx.scan_inhibit = True
_scan_sdr = autorx.task_list['SCAN']['device_idx']
_scan_sdr = autorx.task_list["SCAN"]["device_idx"]
# Stop the scanner.
autorx.task_list['SCAN']['task'].stop()
autorx.task_list["SCAN"]["task"].stop()
# Release the SDR.
autorx.sdr_list[_scan_sdr]['in_use'] = False
autorx.sdr_list[_scan_sdr]['task'] = None
autorx.sdr_list[_scan_sdr]["in_use"] = False
autorx.sdr_list[_scan_sdr]["task"] = None
# Remove the scanner task from the task list
autorx.task_list.pop('SCAN')
autorx.task_list.pop("SCAN")
return "OK"
else:
abort(403)
@app.route('/enable_scanner', methods=['POST'])
@app.route("/enable_scanner", methods=["POST"])
def flask_enable_scanner():
""" Re-enable the Scanner """
if request.method == 'POST' and autorx.config.global_config['web_control']:
if request.method == "POST" and autorx.config.global_config["web_control"]:
# We re-enable the scanner by clearing the scan_inhibit flag.
# This makes it start up on the next run of clean_task_list (approx every 2 seconds)
# unless one is already running.
@ -222,17 +313,19 @@ def flask_enable_scanner():
else:
abort(403)
#
# SocketIO Events
#
@socketio.on('client_connected', namespace='/update_status')
@socketio.on("client_connected", namespace="/update_status")
def refresh_client(arg1):
""" A client has connected, let them know to grab data."""
logging.info("Flask - New Web Client connected!")
# Tell them to get a copy of the latest scan results.
flask_emit_event('scan_event')
flask_emit_event('task_event')
flask_emit_event("scan_event")
flask_emit_event("task_event")
# TODO: Send last few log entries?
@ -240,73 +333,83 @@ def refresh_client(arg1):
# Flask Startup & Shutdown Helper Scripts
#
def flask_thread(host='0.0.0.0', port=5000):
def flask_thread(host="0.0.0.0", port=5000):
""" Flask Server Thread"""
socketio.run(app, host=host, port=port)
def start_flask(host='0.0.0.0', port=5000):
def start_flask(host="0.0.0.0", port=5000):
""" Start up the Flask Server """
global flask_app_thread, flask_shutdown_key
# Generate the shutdown key
flask_shutdown_key = str(random.randint(10000,100000000))
flask_shutdown_key = str(random.randint(10000, 100000000))
# Start up Flask
flask_app_thread = Thread(target=flask_thread, kwargs={'host':host, 'port':port})
flask_app_thread = Thread(target=flask_thread, kwargs={"host": host, "port": port})
flask_app_thread.start()
logging.info("Started Flask server on http://%s:%d" % (host,port))
logging.info("Started Flask server on http://%s:%d" % (host, port))
def stop_flask(host='0.0.0.0', port=5000):
def stop_flask(host="0.0.0.0", port=5000):
""" Shutdown the Flask Server by submmitting a shutdown request """
global flask_shutdown_key
try:
r = requests.get('http://%s:%d/shutdown/%s' % (host,port, flask_shutdown_key))
r = requests.get("http://%s:%d/shutdown/%s" % (host, port, flask_shutdown_key))
logging.info("Web - Flask Server Shutdown.")
except:
# TODO: Cleanup errors
traceback.print_exc()
class WebHandler(logging.Handler):
""" Logging Handler for sending log messages via Socket.IO to a Web Client """
def emit(self, record):
""" Emit a log message via SocketIO """
if 'socket.io' not in record.msg:
if "socket.io" not in record.msg:
# Convert log record into a dictionary
log_data = {
'level': record.levelname,
'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
'msg': record.msg
"level": record.levelname,
"timestamp": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
"msg": record.msg,
}
# Emit to all socket.io clients
socketio.emit('log_event', log_data, namespace='/update_status')
socketio.emit("log_event", log_data, namespace="/update_status")
class WebExporter(object):
""" Push Radiosonde Telemetry Data to a web client """
# We require the following fields to be present in the incoming telemetry dictionary data
REQUIRED_FIELDS = ['frame', 'id', 'datetime', 'lat', 'lon', 'alt', 'temp', 'type', 'freq', 'freq_float', 'datetime_dt']
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"type",
"freq",
"freq_float",
"datetime_dt",
]
def __init__(self,
max_age = 120):
def __init__(self, max_age=120):
""" Initialise a WebExporter object.
Args:
max_age: Store telemetry data up to X minutes old
"""
self.max_age = max_age*60
self.max_age = max_age * 60
self.input_queue = Queue()
# Start the input queue processing thread.
self.input_processing_running = True
self.input_thread = Thread(target=self.process_queue)
self.input_thread.start()
def process_queue(self):
""" Process data from the input queue.
@ -324,8 +427,7 @@ class WebExporter(object):
logging.debug("WebExporter - Closed Processing thread.")
def handle_telemetry(self,telemetry):
def handle_telemetry(self, telemetry):
""" Send incoming telemetry to clients, and add it to the telemetry store. """
global flask_telemetry_store
@ -335,46 +437,59 @@ class WebExporter(object):
for _field in self.REQUIRED_FIELDS:
if _field not in telemetry:
logging.error("WebExporter - JSON object missing required field %s" % _field)
logging.error(
"WebExporter - JSON object missing required field %s" % _field
)
return
_telem = telemetry.copy()
if 'f_centre' in _telem:
if "f_centre" in _telem:
# We have an estimate of the sonde's centre frequency from the modem, use this in place of
# the RX frequency.
# Round to 1 kHz
_freq = round(telemetry['f_centre']/1000.0)
_freq = round(telemetry["f_centre"] / 1000.0)
# Convert to MHz.
_telem['freq'] = "%.3f MHz" % (_freq/1e3)
_telem["freq"] = "%.3f MHz" % (_freq / 1e3)
# Add the telemetry information to the global telemetry store
if _telem['id'] not in flask_telemetry_store:
flask_telemetry_store[_telem['id']] = {'timestamp':time.time(), 'latest_telem':_telem, 'path':[], 'track': GenericTrack()}
if _telem["id"] not in flask_telemetry_store:
flask_telemetry_store[_telem["id"]] = {
"timestamp": time.time(),
"latest_telem": _telem,
"path": [],
"track": GenericTrack(),
}
flask_telemetry_store[_telem["id"]]["path"].append(
[_telem["lat"], _telem["lon"], _telem["alt"]]
)
flask_telemetry_store[_telem["id"]]["latest_telem"] = _telem
flask_telemetry_store[_telem["id"]]["timestamp"] = time.time()
# Update the sonde's track and extract the current state.
flask_telemetry_store[_telem["id"]]["track"].add_telemetry(
{
"time": _telem["datetime_dt"],
"lat": _telem["lat"],
"lon": _telem["lon"],
"alt": _telem["alt"],
}
)
_telem_state = flask_telemetry_store[_telem["id"]]["track"].get_latest_state()
# Add the calculated vertical and horizontal velocity, and heading to the telemetry dict.
_telem["vel_v"] = _telem_state["ascent_rate"]
_telem["vel_h"] = _telem_state["speed"]
_telem["heading"] = _telem_state["heading"]
# Remove the datetime object that is part of the telemetry, if it exists.
# (it might not be present in test data)
if "datetime_dt" in _telem:
_telem.pop("datetime_dt")
# Pass it on to the client.
socketio.emit("telemetry_event", _telem, namespace="/update_status")
def clean_telemetry_store(self):
""" Remove any old data from the telemetry store """
@ -385,11 +500,9 @@ class WebExporter(object):
for _id in _telem_ids:
# If the most recently telemetry is older than self.max_age, remove all data for
# that sonde from the archive.
if (_now - flask_telemetry_store[_id]["timestamp"]) > self.max_age:
flask_telemetry_store.pop(_id)
logging.debug("WebExporter - Removed Sonde #%s from archive." % _id)
def add(self, telemetry):
# Add it to the queue if we are running.
@ -398,13 +511,11 @@ class WebExporter(object):
else:
logging.error("WebExporter - Processing not running, discarding.")
def update_station_position(self, lat, lon, alt):
""" Update the internal station position record. Used when determining the station position by GPSD """
self.station_position = (lat, lon, alt)
_position = {"lat": lat, "lon": lon, "alt": alt}
socketio.emit("station_update", _position, namespace="/update_status")
def close(self):
""" Shutdown """
@ -415,6 +526,7 @@ class WebExporter(object):
# Testing Functions, for easier web development.
#
def test_web_log_to_dict(log_line):
""" Convert a line read from a sonde log to a 'fake' telemetery dictionary """
@ -422,19 +534,19 @@ def test_web_log_to_dict(log_line):
# ('2017-12-29T23:20:47.420', 'M2913212', 1563, -34.94541, 138.52819, 761.7, -273., 'RS92', 401.52)
try:
_telem = {
"frame": log_line[2],
"id": log_line[1],
"datetime": log_line[0],
"lat": log_line[3],
"lon": log_line[4],
"alt": log_line[5],
"temp": log_line[6],
"type": log_line[7],
"freq": str(log_line[8]) + " MHz",
"freq_float": log_line[8],
"vel_v": 0.0,
"datetime_dt": None,
"sdr_device_idx": "00000001",
}
return _telem
except:
@ -444,6 +556,7 @@ def test_web_log_to_dict(log_line):
def test_web_interface(file_list, delay=1.0):
""" Test the web interface map functions by injecting a large amount of sonde telemetry data from sonde log files. """
import numpy as np
global _web
print(file_list)
@ -455,7 +568,7 @@ def test_web_interface(file_list, delay=1.0):
# Read in files and add data to _sondes.
for _file_name in file_list:
try:
_data = np.genfromtxt(_file_name, delimiter=",", dtype=None)
_sondes.append(_data)
print("Read %d records from %s" % (len(_data), _file_name))
if len(_data) < _min_data:
@ -464,7 +577,7 @@ def test_web_interface(file_list, delay=1.0):
print("Could not read %s" % _file_name)
# Number of data points to feed in initially. (10%)
_i = _min_data // 10
# Start up a WebExporter instance
_web = WebExporter()
@ -472,34 +585,36 @@ def test_web_interface(file_list, delay=1.0):
# Feed in the first 10% of data points from each sonde.
print("Injecting %d initial data points." % _i)
for _sonde in _sondes:
for _j in range(0, _i):
_web.add(test_web_log_to_dict(_sonde[_j]))
# Now add in new data every second until CTRL-C
for _k in range(_i, _min_data):
for _sonde in _sondes:
_web.add(test_web_log_to_dict(_sonde[_k]))
logging.info("Added new telemetry data: %d/%d" % (_k,_min_data))
logging.info("Added new telemetry data: %d/%d" % (_k, _min_data))
time.sleep(delay)
if __name__ == "__main__":
# Test script to start up the flask server and show some dummy log data
# This script should be called from the auto_rx directory with:
# python -m autorx.web filename1_sonde.log filename2_sonde.log ..etc
#
import time, sys
from autorx.config import read_auto_rx_config
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
)
logging.getLogger("werkzeug").setLevel(logging.ERROR)
logging.getLogger("socketio").setLevel(logging.ERROR)
logging.getLogger("engineio").setLevel(logging.ERROR)
# Read in config, as the web interface now uses a lot of config data during startup.
# TODO: Make this actually work... it doesn't seem to be writing into the global_config store
# _temp_cfg = read_auto_rx_config('station.cfg')
web_handler = WebHandler()
logging.getLogger().addHandler(web_handler)
@ -515,5 +630,3 @@ if __name__ == "__main__":
logging.info("This is a test message.")
except:
stop_flask()


@ -170,7 +170,7 @@ aprs_server = radiosondy.info
#
# Using <id> means the uploads from multiple stations remain consistent, and we don't end up with
# lots of duplicate sondes on APRS-IS. If you enable the station location beaconing (below), maps
# like aprs.fi and radiosondy.info will show your station as the receiver.
#
# If used, this field should be either a callsign with a -11 or -12 suffix (e.g. N0CALL-12),
# or <id>, which will be replaced with the radiosonde's serial number.
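# For example (the option name aprs_object_id is assumed here):
#   aprs_object_id = <id>
#   aprs_object_id = N0CALL-12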
@ -346,6 +346,8 @@ archive_age = 120
# Do not set this to True on an internet-facing auto_rx instance!!!
web_control = False
# KML refresh rate, in seconds.
kml_refresh_rate = 10
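# The live KML feed is served by the web interface; a Google Earth network link
# pointing at it (endpoint path assumed, e.g. http://localhost:5000/rs.kml)
# will re-fetch the feed every kml_refresh_rate seconds.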
##################
# DEBUG SETTINGS #