First pass of complete scanner/decoder, with habitat upload support.

pull/68/head
Mark Jessop 2018-05-26 18:48:53 +09:30
rodzic 90fcb35aea
commit a1c9a7cc6c
19 zmienionych plików z 590 dodań i 3079 usunięć

Wyświetl plik

@ -1,67 +0,0 @@
# APRS push utils for Sonde auto RX.
from socket import *
# Push a Radiosonde data packet to APRS as an object.
def push_balloon_to_aprs(sonde_data, object_name="<id>", aprs_comment="BOM Balloon", aprsUser="N0CALL", aprsPass="00000", serverHost = 'rotate.aprs2.net', serverPort = 14580):
    """ Push a radiosonde data packet to an APRS-IS server as an APRS object.

    Args:
        sonde_data (dict): Radiosonde telemetry. Must contain 'id', 'lat', 'lon'
            and 'alt' keys; may optionally contain 'heading' (degrees) and
            'vel_h' (assumed m/s, converted to knots - TODO confirm units).
        object_name (str): APRS object name (max 9 chars). The default "<id>"
            substitutes the sonde's ID field.
        aprs_comment (str): Comment field appended to the object packet.
        aprsUser (str): APRS-IS login callsign.
        aprsPass (str): APRS-IS passcode.
        serverHost (str): APRS-IS server hostname.
        serverPort (int): APRS-IS server port.

    Returns:
        str: The APRS object sentence that was uploaded.
    """
    if object_name == "<id>":
        object_name = sonde_data["id"].strip()

    # APRS object names must be exactly 9 characters - truncate or pad with spaces.
    if len(object_name) > 9:
        object_name = object_name[:9]
    elif len(object_name) < 9:
        object_name = object_name + " "*(9-len(object_name))

    # Convert float latitude to APRS format (DDMM.MM)
    lat = float(sonde_data["lat"])
    lat_degree = abs(int(lat))
    lat_minute = abs(lat - int(lat)) * 60.0
    lat_min_str = ("%02.2f" % lat_minute).zfill(5)
    lat_dir = "S"
    if lat > 0.0:
        lat_dir = "N"
    lat_str = "%02d%s" % (lat_degree, lat_min_str) + lat_dir

    # Convert float longitude to APRS format (DDDMM.MM)
    lon = float(sonde_data["lon"])
    lon_degree = abs(int(lon))
    lon_minute = abs(lon - int(lon)) * 60.0
    lon_min_str = ("%02.2f" % lon_minute).zfill(5)
    lon_dir = "E"
    if lon < 0.0:
        lon_dir = "W"
    lon_str = "%03d%s" % (lon_degree, lon_min_str) + lon_dir

    # Convert Alt (in metres) to feet, as required by the APRS spec.
    alt = int(float(sonde_data["alt"])/0.3048)

    # TODO: Limit comment length.
    # Process velocity/heading if supplied; otherwise emit a null course/speed.
    # 1.944 converts m/s to knots.
    if ('heading' in sonde_data.keys()) and ('vel_h' in sonde_data.keys()):
        course_speed = "%03d/%03d" % (int(sonde_data['heading']), int(sonde_data['vel_h']*1.944))
    else:
        course_speed = "000/000"

    # Produce the APRS object string.
    out_str = ";%s*111111z%s/%sO%s/A=%06d %s" % (object_name,lat_str,lon_str,course_speed,alt,aprs_comment)

    # Connect to an APRS-IS server, login, then push our object position in.
    sSock = socket(AF_INET, SOCK_STREAM)
    try:
        sSock.connect((serverHost, serverPort))
        # Logon. Encode to bytes - socket.send() requires bytes on Python 3.
        sSock.send(('user %s pass %s vers VK5QI-Python 0.01\n' % (aprsUser, aprsPass)).encode('ascii'))
        # Send packet.
        sSock.send(('%s>APRS:%s\n' % (aprsUser, out_str)).encode('ascii'))
        sSock.shutdown(0)
    finally:
        # Always release the socket, even if the connect/login/send fails part-way.
        sSock.close()

    return out_str

Wyświetl plik

@ -1,68 +0,0 @@
"""
AsynchronousFileReader
======================
Simple thread based asynchronous file reader for Python.
see https://github.com/soxofaan/asynchronousfilereader
MIT License
Copyright (c) 2014 Stefaan Lippens
"""
__version__ = '0.2.1'
import threading
try:
# Python 2
from Queue import Queue
except ImportError:
# Python 3
from queue import Queue
class AsynchronousFileReader(threading.Thread):
    """
    Helper class to implement asynchronous reading of a file
    in a separate thread. Pushes read lines on a queue to
    be consumed in another thread.
    """

    def __init__(self, fd, queue=None, autostart=True):
        # File-like object we read lines from.
        self._fd = fd
        # Consumers may supply their own queue; otherwise create one.
        self.queue = Queue() if queue is None else queue
        # Flag polled by run(); cleared by stop() to end the reader loop.
        self.running = True

        threading.Thread.__init__(self)
        if autostart:
            self.start()

    def run(self):
        """
        The body of the thread: read lines and put them on the queue.
        """
        # Hoist the bound methods - tiny win, and keeps the loop compact.
        _readline = self._fd.readline
        _enqueue = self.queue.put
        while self.running:
            _line = _readline()
            if not _line:
                # EOF reached - terminate the reader thread.
                break
            _enqueue(_line)

    def eof(self):
        """
        Check whether there is no more content to expect.
        """
        # True only once the reader thread has finished AND the queue is drained.
        return not self.is_alive() and self.queue.empty()

    def stop(self):
        """
        Stop the running thread.
        """
        self.running = False

    def readlines(self):
        """
        Get currently available lines.
        """
        while not self.queue.empty():
            yield self.queue.get()

Plik diff jest za duży Load Diff

Wyświetl plik

@ -6,12 +6,14 @@
# NOTE: If running this from crontab, make sure to set the appropriate PATH env-vars,
# else utilities like rtl_power and rtl_fm won't be found.
#
# WARNING - THIS IS DEPRECATED - USE THE SYSTEMD SERVICE
#
# change into appropriate directory
cd /home/pi/radiosonde_auto_rx/auto_rx/
# Clean up old files
rm log_power.csv
rm log_power*.csv
# Start auto_rx process with a 3 hour timeout.
timeout 14400 python auto_rx.py 2>error.log

Wyświetl plik

@ -5,4 +5,4 @@
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
__version__ = "20180525"
__version__ = "20180525-alpha"

Wyświetl plik

@ -53,7 +53,6 @@ def read_auto_rx_config(filename):
'habitat_uploader_callsign': 'SONDE_AUTO_RX',
'habitat_upload_listener_position': False,
'habitat_payload_callsign': '<id>',
'habitat_payload_description': 'Meteorological Radiosonde',
# APRS Settings
'aprs_enabled' : False,
'aprs_upload_rate': 30,
@ -72,6 +71,7 @@ def read_auto_rx_config(filename):
'synchronous_upload' : False,
'scan_dwell_time' : 20,
'detect_dwell_time' : 5,
'scan_delay' : 10,
'payload_id_valid' : 5,
# Rotator Settings
'enable_rotator': False,
@ -83,7 +83,6 @@ def read_auto_rx_config(filename):
# OziExplorer Settings
'ozi_enabled' : False,
'ozi_update_rate': 5,
'ozi_hostname' : '127.0.0.1',
'ozi_port' : 55681,
'payload_summary_enabled': False,
'payload_summary_port' : 55672
@ -124,7 +123,6 @@ def read_auto_rx_config(filename):
auto_rx_config['habitat_enabled'] = config.getboolean('habitat', 'habitat_enabled')
auto_rx_config['habitat_upload_rate'] = config.getint('habitat', 'upload_rate')
auto_rx_config['habitat_payload_callsign'] = config.get('habitat', 'payload_callsign')
auto_rx_config['habitat_payload_description'] = config.get('habitat', 'payload_description')
auto_rx_config['habitat_uploader_callsign'] = config.get('habitat', 'uploader_callsign')
auto_rx_config['habitat_upload_listener_position'] = config.getboolean('habitat','upload_listener_position')
@ -153,11 +151,13 @@ def read_auto_rx_config(filename):
auto_rx_config['max_peaks'] = config.getint('advanced', 'max_peaks')
auto_rx_config['scan_dwell_time'] = config.getint('advanced', 'scan_dwell_time')
auto_rx_config['detect_dwell_time'] = config.getint('advanced', 'detect_dwell_time')
auto_rx_config['scan_delay'] = config.getint('advanced', 'scan_delay')
auto_rx_config['payload_id_valid'] = config.getint('advanced', 'payload_id_valid')
auto_rx_config['synchronous_upload'] = config.getboolean('advanced', 'synchronous_upload')
# Rotator Settings (TBC)
auto_rx_config['rotator_enabled'] = config.getboolean('rotator','rotator_enabled')
auto_rx_config['rotator_update_rate'] = config.getint('rotator', 'update_rate')
auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname')
auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port')
auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled')
@ -176,7 +176,7 @@ def read_auto_rx_config(filename):
_bias = config.getboolean(_section, 'bias')
if (auto_rx_config['sdr_quantity'] > 1) and (_device_idx == '0'):
logging.error("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!")
logging.critical("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!")
return None
# See if the SDR exists.
@ -190,6 +190,25 @@ def read_auto_rx_config(filename):
logging.error("Config - Error parsing SDR %d config - %s" % (_n,str(e)))
continue
# Sanity checks when using more than one SDR
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['habitat_payload_callsign'] != "<id>"):
logging.critical("Fixed Habitat Payload callsign used in a multi-SDR configuration. Go read the warnings in the config file!")
return None
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['aprs_object_id'] != "<id>"):
logging.critical("Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!")
return None
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['rotator_enabled']):
logging.critical("Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!")
return None
# TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['ozi_enabled'] or auto_rx_config['payload_summary_enabled']):
logging.critical("Chase car outputs (OziPlotter/Payload Summary) enabled in a multi-SDR configuration.")
return None
if len(auto_rx_config['sdr_settings'].keys()) == 0:
# We have no SDRs to use!!
logging.error("Config - No working SDRs! Cannot run...")

Wyświetl plik

@ -373,29 +373,30 @@ class SondeDecoder(object):
if 'aux' in _telemetry:
_telemetry['type'] += "-Ozone"
# If we have been provided a telemetry filter function, pass the telemetry data
# through the filter, and return the response
# By default, we will assume the telemetry is OK.
_telem_ok = True
if self.telem_filter is not None:
try:
_telem_ok = self.telem_filter(_telemetry)
except Exception as e:
self.log_error("Failed to run telemetry filter - %s" % str(e))
_telem_ok = True
# Send to the exporter functions (if we have any).
# If the telemetry is OK, send to the exporter functions (if we have any).
if self.exporters is None:
return
else:
for _exporter in self.exporters:
try:
_exporter(_telemetry)
except Exception as e:
self.log_error("Exporter Error %s" % str(e))
if _telem_ok:
for _exporter in self.exporters:
try:
_exporter(_telemetry)
except Exception as e:
self.log_error("Exporter Error %s" % str(e))
# If we have been provided a telemetry filter function, pass the telemetry data
# through the filter, and return the response
if self.telem_filter is not None:
try:
_ok = self.telem_filter(_telemetry)
return _ok
except Exception as e:
self.log_error("Failed to run telemetry filter - %s" % str(e))
# Otherwise, just assume the telemetry is good.
else:
return True
return _telem_ok
@ -405,7 +406,7 @@ class SondeDecoder(object):
Args:
line (str): Message to be logged.
"""
logging.debug("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line))
logging.debug("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line))
def log_info(self, line):
@ -413,7 +414,7 @@ class SondeDecoder(object):
Args:
line (str): Message to be logged.
"""
logging.info("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line))
logging.info("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line))
def log_error(self, line):
@ -421,7 +422,7 @@ class SondeDecoder(object):
Args:
line (str): Message to be logged.
"""
logging.error("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line))
logging.error("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line))
def stop(self):

Wyświetl plik

@ -13,7 +13,7 @@ import os
def get_ephemeris(destination="ephemeris.dat"):
''' Download the latest GPS ephemeris file from the CDDIS's FTP server '''
try:
logging.info("Connecting to GSFC FTP Server...")
logging.debug("GPS Grabber - Connecting to GSFC FTP Server...")
ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10)
ftp.login("anonymous","anonymous")
ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y"))
@ -28,10 +28,10 @@ def get_ephemeris(destination="ephemeris.dat"):
elif file_suffix in file_list[-2]:
download_file = file_list[-2]
else:
logging.error("Could not find appropriate ephemeris file.")
logging.error("GPS Grabber - Could not find appropriate ephemeris file.")
return None
logging.info("Downloading ephemeris data file: %s" % download_file)
logging.debug("GPS Grabber - Downloading ephemeris data file: %s" % download_file)
# Download file.
f_eph = open(destination+".Z",'wb')
@ -42,11 +42,11 @@ def get_ephemeris(destination="ephemeris.dat"):
# Unzip file.
os.system("gunzip -q -f ./%s" % (destination+".Z"))
logging.info("Ephemeris downloaded to %s successfuly!" % destination)
logging.info("GPS Grabber - Ephemeris downloaded to %s successfuly!" % destination)
return destination
except Exception as e:
logging.error("Could not download ephemeris file. - %s" % str(e))
logging.error("GPS Grabber - Could not download ephemeris file. - %s" % str(e))
return None
def get_almanac(destination="almanac.txt", timeout=20):
@ -58,13 +58,13 @@ def get_almanac(destination="almanac.txt", timeout=20):
f = open(destination,'w')
f.write(data)
f.close()
logging.info("Almanac downloaded to %s successfuly!" % destination)
logging.info("GPS Grabber - Almanac downloaded to %s successfuly!" % destination)
return destination
else:
logging.error("Downloaded file is not a GPS almanac.")
logging.error("GPS Grabber - Downloaded file is not a GPS almanac.")
return None
except Exception as e:
logging.error("Failed to download almanac data - " % str(e))
logging.error("GPS Grabber - Failed to download almanac data - " % str(e))
return None

Wyświetl plik

@ -16,6 +16,7 @@ import json
from base64 import b64encode
from hashlib import sha256
from threading import Thread
from . import __version__ as auto_rx_version
try:
# Python 2
from Queue import Queue
@ -325,7 +326,7 @@ def uploadListenerPosition(callsign, lat, lon, version=''):
# If this fails, it means we can't contact the Habitat server,
# so there is no point continuing.
if resp is False:
return
return False
doc = {
'type': 'listener_telemetry',
@ -344,8 +345,10 @@ def uploadListenerPosition(callsign, lat, lon, version=''):
resp = postListenerData(doc)
if resp is True:
logging.info("Habitat - Listener information uploaded.")
return True
else:
logging.error("Habitat - Unable to upload listener information.")
return False
#
@ -394,7 +397,7 @@ class HabitatUploader(object):
when a new sonde ID is observed.
payload_callsign_override (str): Override the payload callsign in the uploaded sentence with this value.
WARNING: This will horrible break the tracker map if multiple sondes are uploaded under the same callsign.
WARNING: This will horribly break the tracker map if multiple sondes are uploaded under the same callsign.
USE WITH CAUTION!!!
synchronous_upload_time (int): Upload the most recent telemetry when time.time()%synchronous_upload_time == 0
@ -413,6 +416,7 @@ class HabitatUploader(object):
"""
self.user_callsign = user_callsign
self.user_position = user_position
self.payload_callsign_override = payload_callsign_override
self.upload_timeout = upload_timeout
self.upload_retries = upload_retries
@ -434,6 +438,7 @@ class HabitatUploader(object):
# 'data' (Queue): A queue of telemetry sentences to be uploaded. When the upload timer fires,
# this queue will be dumped, and the most recent telemetry uploaded.
# 'habitat_document' (bool): Indicates if a habitat document has been created for this payload ID.
# 'listener_updated' (bool): Indicates if the listener position has been updated for the start of this ID's flight.
self.observed_payloads = {}
# Start the uploader thread.
@ -450,6 +455,10 @@ class HabitatUploader(object):
self.timer_thread = Thread(target=self.upload_timer)
self.timer_thread.start()
# Upload listener position
if self.user_position is not None:
uploadListenerPosition(self.user_callsign, self.user_position[0], self.user_position[1], version=auto_rx_version)
def habitat_upload(self, sentence):
@ -627,7 +636,7 @@ class HabitatUploader(object):
if _id not in self.observed_payloads:
# We haven't seen this ID before, so create a new dictionary entry for it.
self.observed_payloads[_id] = {'count':1, 'data':Queue(), 'habitat_document': False}
self.observed_payloads[_id] = {'count':1, 'data':Queue(), 'habitat_document': False, 'listener_updated': False}
self.log_debug("New Payload %s. Not observed enough to allow upload." % _id)
# However, we don't yet add anything to the queue for this payload...
else:
@ -637,7 +646,16 @@ class HabitatUploader(object):
# If we have seen this particular ID enough times, add the data to the ID's queue.
if self.observed_payloads[_id]['count'] >= self.callsign_validity_threshold:
# Add the telemetry to the queue
self.observed_payloads[_id]['data'].put(_telem)
# If this is the first time we have observed this payload, update the listener position.
if (self.observed_payloads[_id]['listener_updated'] == False) and (self.user_position is not None):
self.observed_payloads[_id]['listener_updated'] = uploadListenerPosition(
self.user_callsign,
self.user_position[0],
self.user_position[1],
version=auto_rx_version)
else:
self.log_debug("Payload ID %s not observed enough to allow upload." % _id)
@ -717,23 +735,5 @@ class HabitatUploader(object):
logging.warning("Habitat - %s" % line)
#
# Functions for uploading telemetry to Habitat
#
# DEPRECATED - USE
def habitat_upload_payload_telemetry(uploader, telemetry, payload_callsign = "RADIOSONDE", callsign="N0CALL", comment=None):
''' Add a packet of radiosonde telemetry to the Habitat uploader queue. '''
sentence = telemetry_to_sentence(telemetry, payload_callsign = payload_callsign, comment=comment)
try:
uploader.add(sentence)
except Exception as e:
logging.error("Could not add telemetry to Habitat Uploader - %s" % str(e))

Wyświetl plik

@ -80,15 +80,15 @@ def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_p
gain_param,
filename)
logging.info("Scanner - Running frequency scan.")
logging.debug("Scanner - Running command: %s" % rtl_power_cmd)
logging.info("Scanner #%s - Running frequency scan." % str(device_idx))
#logging.debug("Scanner - Running command: %s" % rtl_power_cmd)
try:
FNULL = open(os.devnull, 'w')
subprocess.check_call(rtl_power_cmd, shell=True, stderr=FNULL)
FNULL.close()
except subprocess.CalledProcessError:
logging.critical("Scanner - rtl_power call failed!")
logging.critical("Scanner #%s - rtl_power call failed!" % str(device_idx))
return False
else:
return True
@ -192,8 +192,7 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
rx_test_command += "sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null |"
rx_test_command += os.path.join(rs_path,"rs_detect") + " -z -t 8 2>/dev/null >/dev/null"
logging.info("Scanner - Attempting sonde detection on %.3f MHz" % (frequency/1e6))
logging.debug("Scanner - Running command: %s" % rx_test_command)
logging.debug("Scanner #%s - Attempting sonde detection on %.3f MHz" % (str(device_idx), frequency/1e6))
try:
FNULL = open(os.devnull, 'w')
@ -201,7 +200,7 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
FNULL.close()
except Exception as e:
# Something broke when running the detection function.
logging.error("Scanner - Error when running rs_detect - %s" % str(e))
logging.error("Scanner #%s - Error when running rs_detect - %s" % (str(device_idx), str(e)))
return None
# Shift down by a byte... for some reason.
@ -223,19 +222,19 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device
ret_code = abs(ret_code)
if ret_code == 3:
logging.info("Scanner - Detected a RS41!")
logging.debug("Scanner #%s - Detected a RS41!" % str(device_idx))
return inv+"RS41"
elif ret_code == 4:
logging.info("Scanner - Detected a RS92!")
logging.debug("Scanner #%s - Detected a RS92!" % str(device_idx))
return inv+"RS92"
elif ret_code == 2:
logging.info("Scanner - Detected a DFM Sonde!")
logging.debug("Scanner #%s - Detected a DFM Sonde!" % str(device_idx))
return inv+"DFM"
elif ret_code == 5:
logging.info("Scanner - Detected a M10 Sonde! (Unsupported)")
logging.debug("Scanner #%s - Detected a M10 Sonde! (Unsupported)" % str(device_idx))
return inv+"M10"
elif ret_code == 6:
logging.info("Scanner - Detected a iMet Sonde! (Unsupported)")
logging.debug("Scanner #%s - Detected a iMet Sonde! (Unsupported)" % str(device_idx))
return inv+"iMet"
else:
return None
@ -267,6 +266,7 @@ class SondeScanner(object):
quantization = 10000,
scan_dwell_time = 20,
detect_dwell_time = 5,
scan_delay = 10,
max_peaks = 10,
rs_path = "./",
sdr_power = "rtl_power",
@ -295,6 +295,7 @@ class SondeScanner(object):
Essentially all radiosondes transmit on 10 kHz channel steps.
scan_dwell_time (int): Number of seconds for rtl_power to average spectrum over. Default = 20 seconds.
detect_dwell_time (int): Number of seconds to allow rs_detect to attempt to detect a sonde. Default = 5 seconds.
scan_delay (int): Delay X seconds between scan runs.
max_peaks (int): Maximum number of peaks to search over. Peaks are ordered by signal power before being limited to this number.
rs_path (str): Path to the RS binaries (i.e rs_detect). Defaults to ./
sdr_power (str): Path to rtl_power, or drop-in equivalent. Defaults to 'rtl_power'
@ -321,6 +322,7 @@ class SondeScanner(object):
self.quantization = quantization
self.scan_dwell_time = scan_dwell_time
self.detect_dwell_time = detect_dwell_time
self.scan_delay = scan_delay
self.max_peaks = max_peaks
self.rs_path = rs_path
self.sdr_power = sdr_power
@ -360,6 +362,20 @@ class SondeScanner(object):
self.log_warning("Sonde scan already running!")
def send_to_callback(self, results):
""" Send scan results to a callback.
Args:
results (list): List consisting of [freq, type)]
"""
try:
if self.callback != None:
self.callback(results)
except Exception as e:
self.log_error("Error handling scan results - %s" % str(e))
def scan_loop(self):
""" Continually perform scans, and pass any results onto the callback function """
@ -389,13 +405,9 @@ class SondeScanner(object):
else:
# Scan completed successfuly! Reset the error counter.
self.error_retries = 0
# If we have scan results, pass them onto the callback.
if len(_results) > 0:
try:
if self.callback != None:
self.callback(_results)
except Exception as e:
self.log_error("Error handling scan results - %s" % str(e))
# Sleep before starting the next scan.
time.sleep(self.scan_delay)
@ -461,7 +473,7 @@ class SondeScanner(object):
# If we have found no peaks, and no greylist has been provided, re-scan.
if (len(peak_indices) == 0) and (len(self.greylist) == 0):
self.log_info("No peaks found.")
self.log_debug("No peaks found.")
return []
# Sort peaks by power.
@ -489,10 +501,10 @@ class SondeScanner(object):
peak_frequencies = np.append(np.array(self.greylist)*1e6, peak_frequencies)
if len(peak_frequencies) == 0:
self.log_info("No peaks found after blacklist frequencies removed.")
self.log_debug("No peaks found after blacklist frequencies removed.")
return []
else:
self.log_info("Performing scan on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6)))
self.log_info("Detected peaks on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6)))
else:
# We have been provided a whitelist - scan through the supplied frequencies.
@ -517,6 +529,9 @@ class SondeScanner(object):
if detected != None:
# Add a detected sonde to the output array
_search_results.append([freq, detected])
# Immediately send this result to the callback.
self.send_to_callback([[freq, detected]])
# If we only want the first detected sonde, then return now.
if first_only:
return _search_results
@ -524,9 +539,9 @@ class SondeScanner(object):
# Otherwise, we continue....
if len(_search_results) == 0:
self.log_info("No sondes detected.")
self.log_debug("No sondes detected.")
else:
self.log_info("Detected Sondes: %s" % str(_search_results))
self.log_debug("Scan Detected Sondes: %s" % str(_search_results))
return _search_results

Wyświetl plik

@ -1,370 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Service - V2.0
#
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
# Refer github page for instructions on setup and usage.
# https://github.com/projecthorus/radiosonde_auto_rx/
#
import argparse
import datetime
import logging
import sys
import time
import traceback
from autorx.scan import SondeScanner
from autorx.decode import SondeDecoder
from autorx.logger import TelemetryLogger
from autorx.habitat import HabitatUploader
from autorx.utils import rtlsdr_test
from autorx.config import read_auto_rx_config
try:
# Python 2
from Queue import Queue
except ImportError:
# Python 3
from queue import Queue
# Logging level
# INFO = Basic status messages
# DEBUG = Adds detailed information on submodule operations.
logging_level = logging.DEBUG

#
# Global Variables
#

# Path to the RS decoder binaries (e.g. rs_detect), passed to the scanner and decoders.
RS_PATH = "./"

# Optional override for RS92 ephemeris data.
# NOTE(review): never assigned anywhere in this file (main() parses -e but does not apply it).
rs92_ephemeris = None

# Global configuration dictionary, populated by main() via read_auto_rx_config().
config = None

# Exporter Lists
exporter_objects = []    # This list will hold references to each exporter instance that is created.
exporter_functions = []  # This list will hold references to the exporter add functions, which will be passed onto the decoders.

# RTLSDR Usage Register - This dictionary holds information about each SDR and its currently running Decoder / Scanner
# Key = SDR device index / ID
# 'device_idx': {
#     'in_use' (bool) : True if the SDR is currently in-use by a decoder or scanner.
#     'task' (class)  : If this SDR is in use, a reference to the task.
#     'bias' (bool)   : True if the bias-tee should be enabled on this SDR, False otherwise.
#     'ppm' (int)     : The PPM offset for this SDR.
#     'gain' (float)  : The gain setting to use with this SDR. A setting of -1 turns on hardware AGC.
# }
sdr_list = {}

# Currently running task register.
# Keys will either be 'SCAN' (only one scanner shall be running at a time), or a sonde frequency in MHz.
# Each element contains:
#     'task' : (class) Reference to the currently running task.
#     'device_idx' (str): The allocated SDR.
task_list = {}

# Scan Result Queue
# Scan results are processed asynchronously from the main scanner object,
# via handle_scan_results() in the main loop.
scan_results = Queue()
def allocate_sdr(check_only = False):
    """ Allocate an un-used SDR for a task.

    Args:
        check_only (bool): If True, don't set the free SDR as in-use.
            Used to check if there are any free SDRs.

    Returns:
        (str): The device index/serial number of the free/allocated SDR,
        if one is free, else None.
    """
    global sdr_list

    for _device_id, _entry in sdr_list.items():
        if _entry['in_use']:
            continue

        # Found a free SDR. Unless we are only probing for availability,
        # mark it as in-use before handing it out.
        if not check_only:
            _entry['in_use'] = True
            logging.info("SDR #%s has been allocated." % str(_device_id))

        return _device_id

    # No free SDRs available.
    return None
def clean_task_list():
    """ Routinely run to check the task list to see if any tasks have stopped
    running. If so, release the associated SDR.

    Also restarts the scanner if no scan task is running and an SDR is free.
    """
    global task_list, sdr_list

    # Iterate over a snapshot of the keys: popping entries from task_list while
    # iterating task_list.keys() directly raises RuntimeError on Python 3.
    for _key in list(task_list.keys()):
        # Attempt to get the state of the task
        try:
            _running = task_list[_key]['task'].running()
            _task_sdr = task_list[_key]['device_idx']
        except Exception as e:
            logging.error("Task Manager - Error getting task %s state - %s" % (str(_key),str(e)))
            continue

        if _running == False:
            # This task has stopped. Release its associated SDR.
            sdr_list[_task_sdr]['in_use'] = False
            sdr_list[_task_sdr]['task'] = None
            # Pop the task from the task list.
            task_list.pop(_key)

    # Check if there is a scanner thread still running.
    # If not, and if there is a SDR free, start one up again.
    if ('SCAN' not in task_list) and (allocate_sdr(check_only=True) is not None):
        # We have a SDR free, and we are not running a scan thread. Start one.
        start_scanner()
def start_scanner():
    """ Start a scanner thread on the first available SDR.

    Reads all scanner settings from the global config, registers the new
    scanner under the 'SCAN' key of the global task list, and points its
    result callback at the global scan_results queue (consumed by
    handle_scan_results()).
    """
    global task_list, sdr_list, config, scan_results, RS_PATH

    if 'SCAN' in task_list:
        # Already a scanner running! Return.
        logging.debug("Task Manager - Attempted to start a scanner, but one already running.")
        return

    # Attempt to allocate a SDR.
    _device_idx = allocate_sdr()

    if _device_idx is None:
        logging.debug("Task Manager - No SDRs free to run Scanner.")
        return
    else:
        # Create entry in task list.
        task_list['SCAN'] = {'device_idx': _device_idx, 'task': None}

        # Init Scanner using settings from the global config.
        # auto_start=True means the scan thread begins running immediately.
        # NOTE(review): the 'scan_delay' config option is not passed through
        # here - confirm whether SondeScanner's default is acceptable.
        task_list['SCAN']['task'] = SondeScanner(
            callback = scan_results.put,
            auto_start = True,
            min_freq = config['min_freq'],
            max_freq = config['max_freq'],
            search_step = config['search_step'],
            whitelist = config['whitelist'],
            greylist = config['greylist'],
            blacklist = config['blacklist'],
            snr_threshold = config['snr_threshold'],
            min_distance = config['min_distance'],
            quantization = config['quantization'],
            scan_dwell_time = config['scan_dwell_time'],
            detect_dwell_time = config['detect_dwell_time'],
            max_peaks = config['max_peaks'],
            rs_path = RS_PATH,
            sdr_power = config['sdr_power'],
            sdr_fm = config['sdr_fm'],
            device_idx = _device_idx,
            gain = sdr_list[_device_idx]['gain'],
            ppm = sdr_list[_device_idx]['ppm'],
            bias = sdr_list[_device_idx]['bias']
        )

        # Add a reference into the sdr_list entry
        sdr_list[_device_idx]['task'] = task_list['SCAN']['task']
def stop_scanner():
    """ Stop a currently running scan thread, and release the SDR it was using. """
    global task_list, sdr_list

    if 'SCAN' not in task_list:
        # No scanner thread running - nothing to stop, which likely means
        # we already have a SDR free.
        return

    logging.info("Halting Scanner to decode detected radiosonde.")

    _scanner_entry = task_list['SCAN']
    _scan_sdr = _scanner_entry['device_idx']

    # Stop the scanner thread.
    _scanner_entry['task'].stop()

    # Release the SDR back into the pool.
    sdr_list[_scan_sdr]['in_use'] = False
    sdr_list[_scan_sdr]['task'] = None

    # Remove the scanner task from the task list.
    task_list.pop('SCAN')
def start_decoder(freq, sonde_type):
    """ Attempt to start a decoder thread for a detected sonde.

    Args:
        freq (float): Sonde frequency, as reported by the scanner. Also used
            as the key for this decoder's entry in the global task list.
        sonde_type (str): Sonde type string from the scanner (e.g. 'RS41').
    """
    global config, task_list, sdr_list, RS_PATH, exporter_functions, rs92_ephemeris

    # Allocate a SDR.
    _device_idx = allocate_sdr()

    if _device_idx is None:
        logging.error("Could not allocate SDR for decoder!")
        return
    else:
        # Add an entry to the task list
        task_list[freq] = {'device_idx': _device_idx, 'task': None}

        # Set the SDR to in-use
        # NOTE(review): allocate_sdr() already marked this SDR in-use;
        # this re-assertion is redundant but harmless.
        sdr_list[_device_idx]['in_use'] = True

        # Initialise a decoder. Telemetry is passed to the exporter functions,
        # gated by telemetry_filter().
        task_list[freq]['task'] = SondeDecoder(
            sonde_type = sonde_type,
            sonde_freq = freq,
            rs_path = RS_PATH,
            sdr_fm = config['sdr_fm'],
            device_idx = _device_idx,
            gain = sdr_list[_device_idx]['gain'],
            ppm = sdr_list[_device_idx]['ppm'],
            bias = sdr_list[_device_idx]['bias'],
            exporter = exporter_functions,
            timeout = config['rx_timeout'],
            telem_filter = telemetry_filter,
            rs92_ephemeris = rs92_ephemeris
        )

        # Add a reference into the sdr_list entry.
        sdr_list[_device_idx]['task'] = task_list[freq]['task']
def handle_scan_results():
    """ Read in Scan results via the scan results Queue.

    Depending on how many SDRs are available, two things can happen:
    - If there is a free SDR, allocate it to a decoder.
    - If there is no free SDR, but a scanner is running, stop the scanner
      and start decoding.
    """
    global scan_results, task_list, sdr_list

    if scan_results.qsize() == 0:
        # Nothing to process.
        return

    _scan_data = scan_results.get()
    for (_freq, _type) in _scan_data:
        if _freq in task_list:
            # Already decoding this sonde - ignore the repeated detection.
            continue

        if allocate_sdr(check_only=True) is not None:
            # There is a SDR free! Start the decoder on that SDR.
            start_decoder(_freq, _type)
        elif 'SCAN' in task_list:
            # We have run out of SDRs, but a scan thread is running.
            # Stop the scan thread and take that receiver!
            stop_scanner()
            start_decoder(_freq, _type)
        else:
            # We have no SDRs free - drop this detection.
            pass
def stop_all():
    """ Shut-down all decoders, scanners, and exporters. """
    global task_list, exporter_objects

    logging.info("Starting shutdown of all threads.")

    # Stop every running scanner/decoder task. A failure to stop one task
    # should not prevent the rest from being shut down.
    for _entry in task_list.values():
        try:
            _entry['task'].stop()
        except Exception as e:
            logging.error("Error stopping task - %s" % str(e))

    # Close all exporters, again continuing past individual failures.
    for _exporter in exporter_objects:
        try:
            _exporter.close()
        except Exception as e:
            logging.error("Error stopping exporter - %s" % str(e))
def telemetry_filter(telemetry):
    """ Filter incoming radiosonde telemetry based on distance from the receiver.

    Args:
        telemetry (dict): Radiosonde telemetry dictionary.

    Returns:
        bool: True if the telemetry should be passed on to the exporters.
    """
    global config
    # TODO: Implement position-based filtering using the station config.
    # For now, accept all telemetry unconditionally.
    return True
def main():
    """ Main loop: read configuration, start exporters, then scan/decode forever. """
    global config, sdr_list, exporter_objects, exporter_functions

    # Log everything to a timestamped file under ./log/, and mirror it to stdout.
    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', filename=datetime.datetime.utcnow().strftime("log/%Y%m%d-%H%M%S_system.log"), level=logging_level)
    stdout_format = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(stdout_format)
    logging.getLogger().addHandler(stdout_handler)

    # Set the requests / urllib3 loggers to only display CRITICAL messages or higher,
    # to avoid HTTP chatter in our logs.
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.CRITICAL)
    urllib3_log = logging.getLogger("urllib3")
    urllib3_log.setLevel(logging.CRITICAL)

    # Command line arguments.
    parser = argparse.ArgumentParser()
    parser.add_argument("-c" ,"--config", default="station_new.cfg", help="Receive Station Configuration File")
    parser.add_argument("-f", "--frequency", type=float, default=0.0, help="Sonde Frequency (MHz) (bypass scan step, and quit if no sonde found).")
    parser.add_argument("-e", "--ephemeris", type=str, default="None", help="Use a manually obtained ephemeris file.")
    args = parser.parse_args()

    # Attempt to read in the config file; a parse failure is fatal.
    logging.info("Reading configuration file...")
    _temp_cfg = read_auto_rx_config(args.config)
    if _temp_cfg is None:
        logging.critical("Error in configuration file! Exiting...")
        sys.exit(1)
    else:
        config = _temp_cfg
        sdr_list = config['sdr_settings']

    # If we have been supplied a frequency via the command line, override the whitelist settings.
    if args.frequency != 0.0:
        config['whitelist'] = [args.frequency]

    # Start our exporter options.
    if config['per_sonde_log']:
        # Per-sonde telemetry file logger.
        _logger = TelemetryLogger(log_directory="./testlog/")
        exporter_objects.append(_logger)
        exporter_functions.append(_logger.add)

    # Habitat
    # APRS
    # OziExplorer

    # Main loop: reap finished tasks and dispatch scan results, every 5 seconds.
    while True:
        clean_task_list()
        handle_scan_results()
        time.sleep(5)
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Operator requested shutdown (Ctrl-C) - stop all threads cleanly.
        stop_all()
    except Exception as e:
        # Any other unhandled error: print the traceback, then shut down.
        traceback.print_exc()
        print("Main Loop Error - %s" % str(e))
        stop_all()

Wyświetl plik

@ -1,145 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Tools - Configuration File Parser
#
# 2017-04 Mark Jessop <vk5qi@rfhead.net>
#
import ConfigParser
import logging
import traceback
import json
def read_auto_rx_config(filename):
    """ Read an auto_rx station configuration file.

    Args:
        filename (str): Path to the station configuration file.

    Returns:
        dict: Parsed configuration. If the file cannot be parsed, the
        built-in defaults below are returned instead (possibly partially
        overwritten if parsing failed mid-way).
    """
    # Configuration Defaults:
    auto_rx_config = {
        'per_sonde_log' : True,
        'sdr_fm_path': 'rtl_fm',
        'sdr_power_path': 'rtl_power',
        'sdr_ppm' : 0,
        'sdr_gain' : -1,
        'sdr_bias' : False,
        'search_attempts': 5,
        'search_delay' : 10,
        'min_freq' : 400.4,
        'max_freq' : 404.0,
        'search_step' : 800,
        'min_snr' : 10,
        'min_distance' : 1000,
        'dwell_time' : 10,
        'quantization' : 10000,
        'rx_timeout' : 120,
        'station_lat' : 0.0,
        'station_lon' : 0.0,
        'station_alt' : 0.0,
        'upload_rate' : 30,
        'synchronous_upload' : False,
        'enable_aprs' : False,
        'enable_habitat': False,
        'aprs_user' : 'N0CALL',
        'aprs_pass' : '00000',
        'aprs_server' : 'rotate.aprs2.net',
        'aprs_object_id': '<id>',
        'aprs_custom_comment': 'Radiosonde Auto-RX <freq>',
        'payload_callsign': '<id>',
        'payload_description': 'Meteorological Radiosonde',
        'uploader_callsign': 'SONDE_AUTO_RX',
        'upload_listener_position': False,
        'enable_rotator': False,
        'rotator_hostname': '127.0.0.1',
        'rotator_port' : 4533,
        'rotator_homing_enabled': False,
        'rotator_home_azimuth': 0,
        'rotator_home_elevation': 0,
        'ozi_enabled' : False,
        'ozi_update_rate': 5,
        'ozi_hostname' : '127.0.0.1',
        'ozi_port' : 55681,
        'mqtt_enabled' : False,
        'mqtt_hostname' : '127.0.0.1',
        'mqtt_port' : 1883,
        'payload_summary_enabled': False,
        'payload_summary_port' : 55672,
        'whitelist' : [],
        'blacklist' : [],
        'greylist' : [],
        'max_altitude' : 50000,
        'max_radius_km' : 1000,
        'payload_id_valid' : 5 # TODO: Add this to config file in next bulk update.
    }

    try:
        # The defaults dict above also seeds the parser's DEFAULT section,
        # so options missing from the file fall back to these values.
        config = ConfigParser.RawConfigParser(auto_rx_config)
        config.read(filename)

        # Logging
        auto_rx_config['per_sonde_log'] = config.getboolean('logging', 'per_sonde_log')

        # SDR settings
        auto_rx_config['sdr_fm_path'] = config.get('sdr','sdr_fm_path')
        auto_rx_config['sdr_power_path'] = config.get('sdr','sdr_power_path')
        auto_rx_config['sdr_ppm'] = int(config.getfloat('sdr', 'sdr_ppm'))
        auto_rx_config['sdr_gain'] = config.getfloat('sdr', 'sdr_gain')
        auto_rx_config['sdr_bias'] = config.getboolean('sdr', 'sdr_bias')

        # Scan parameters
        auto_rx_config['search_attempts'] = config.getint('search_params', 'search_attempts')
        auto_rx_config['search_delay'] = config.getint('search_params', 'search_delay')
        auto_rx_config['min_freq'] = config.getfloat('search_params', 'min_freq')
        auto_rx_config['max_freq'] = config.getfloat('search_params', 'max_freq')
        auto_rx_config['search_step'] = config.getfloat('search_params', 'search_step')
        auto_rx_config['min_snr'] = config.getfloat('search_params', 'min_snr')
        auto_rx_config['min_distance'] = config.getfloat('search_params', 'min_distance')
        auto_rx_config['dwell_time'] = config.getint('search_params', 'dwell_time')
        auto_rx_config['quantization'] = config.getint('search_params', 'quantization')
        auto_rx_config['rx_timeout'] = config.getint('search_params', 'rx_timeout')

        # Station location
        auto_rx_config['station_lat'] = config.getfloat('location', 'station_lat')
        auto_rx_config['station_lon'] = config.getfloat('location', 'station_lon')
        auto_rx_config['station_alt'] = config.getfloat('location', 'station_alt')

        # Upload settings
        auto_rx_config['upload_rate'] = config.getint('upload', 'upload_rate')
        auto_rx_config['synchronous_upload'] = config.getboolean('upload','synchronous_upload')
        auto_rx_config['enable_aprs'] = config.getboolean('upload', 'enable_aprs')
        auto_rx_config['enable_habitat'] = config.getboolean('upload', 'enable_habitat')

        # APRS
        auto_rx_config['aprs_user'] = config.get('aprs', 'aprs_user')
        auto_rx_config['aprs_pass'] = config.get('aprs', 'aprs_pass')
        auto_rx_config['aprs_server'] = config.get('aprs', 'aprs_server')
        auto_rx_config['aprs_object_id'] = config.get('aprs', 'aprs_object_id')
        auto_rx_config['aprs_custom_comment'] = config.get('aprs', 'aprs_custom_comment')

        # Habitat
        auto_rx_config['payload_callsign'] = config.get('habitat', 'payload_callsign')
        auto_rx_config['payload_description'] = config.get('habitat', 'payload_description')
        auto_rx_config['uploader_callsign'] = config.get('habitat', 'uploader_callsign')
        auto_rx_config['upload_listener_position'] = config.getboolean('habitat','upload_listener_position')

        # Rotator
        auto_rx_config['enable_rotator'] = config.getboolean('rotator','enable_rotator')
        auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname')
        auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port')
        auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled')
        auto_rx_config['rotator_home_azimuth'] = config.getfloat('rotator', 'rotator_home_azimuth')
        auto_rx_config['rotator_home_elevation'] = config.getfloat('rotator', 'rotator_home_elevation')

        # OziPlotter
        auto_rx_config['ozi_enabled'] = config.getboolean('oziplotter', 'ozi_enabled')
        auto_rx_config['ozi_update_rate'] = config.getint('oziplotter', 'ozi_update_rate')
        # BUG FIX: ozi_hostname had a default, but was never actually read
        # from the configuration file.
        auto_rx_config['ozi_hostname'] = config.get('oziplotter', 'ozi_hostname')
        auto_rx_config['ozi_port'] = config.getint('oziplotter', 'ozi_port')
        auto_rx_config['payload_summary_enabled'] = config.getboolean('oziplotter', 'payload_summary_enabled')
        auto_rx_config['payload_summary_port'] = config.getint('oziplotter', 'payload_summary_port')

        # Read in lists using a JSON parser.
        auto_rx_config['whitelist'] = json.loads(config.get('search_params', 'whitelist'))
        auto_rx_config['blacklist'] = json.loads(config.get('search_params', 'blacklist'))
        auto_rx_config['greylist'] = json.loads(config.get('search_params', 'greylist'))

        # Position Filtering
        auto_rx_config['max_altitude'] = config.getint('filtering', 'max_altitude')
        auto_rx_config['max_radius_km'] = config.getint('filtering', 'max_radius_km')

        # MQTT settings
        auto_rx_config['mqtt_enabled'] = config.getboolean('mqtt', 'mqtt_enabled')
        auto_rx_config['mqtt_hostname'] = config.get('mqtt', 'mqtt_hostname')
        auto_rx_config['mqtt_port'] = config.getint('mqtt', 'mqtt_port')

        return auto_rx_config

    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt / SystemExit
        # are not swallowed. Behaviour otherwise unchanged: log and fall
        # back to the defaults.
        traceback.print_exc()
        logging.error("Could not parse config file, using defaults.")
        return auto_rx_config
if __name__ == '__main__':
    ''' Quick test script to attempt to read in a config file. '''
    # Usage: python <this_file> <config_file>
    import sys
    print(read_auto_rx_config(sys.argv[1]))

Wyświetl plik

@ -1,175 +0,0 @@
"""Detect peaks in data based on their amplitude and other features."""
from __future__ import division, print_function
import numpy as np
__author__ = "Marcos Duarte, https://github.com/demotu/BMC"
__version__ = "1.0.4"
__license__ = "MIT"
def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising',
                 kpsh=False, valley=False, show=False, ax=None):
    """Detect peaks in data based on their amplitude and other features.

    Parameters
    ----------
    x : 1D array_like
        data.
    mph : {None, number}, optional (default = None)
        detect peaks that are greater than minimum peak height.
    mpd : positive integer, optional (default = 1)
        detect peaks that are at least separated by minimum peak distance (in
        number of data).
    threshold : positive number, optional (default = 0)
        detect peaks (valleys) that are greater (smaller) than `threshold`
        in relation to their immediate neighbors.
    edge : {None, 'rising', 'falling', 'both'}, optional (default = 'rising')
        for a flat peak, keep only the rising edge ('rising'), only the
        falling edge ('falling'), both edges ('both'), or don't detect a
        flat peak (None).
    kpsh : bool, optional (default = False)
        keep peaks with same height even if they are closer than `mpd`.
    valley : bool, optional (default = False)
        if True (1), detect valleys (local minima) instead of peaks.
    show : bool, optional (default = False)
        if True (1), plot data in matplotlib figure.
    ax : a matplotlib.axes.Axes instance, optional (default = None).

    Returns
    -------
    ind : 1D array_like
        indeces of the peaks in `x`.

    Notes
    -----
    The detection of valleys instead of peaks is performed internally by simply
    negating the data: `ind_valleys = detect_peaks(-x)`
    The function can handle NaN's
    See this IPython Notebook [1]_.

    References
    ----------
    .. [1] http://nbviewer.ipython.org/github/demotu/BMC/blob/master/notebooks/DetectPeaks.ipynb

    Examples
    --------
    >>> from detect_peaks import detect_peaks
    >>> x = np.random.randn(100)
    >>> x[60:81] = np.nan
    >>> # detect all peaks and plot data
    >>> ind = detect_peaks(x, show=True)
    >>> print(ind)
    >>> x = np.sin(2*np.pi*5*np.linspace(0, 1, 200)) + np.random.randn(200)/5
    >>> # set minimum peak height = 0 and minimum peak distance = 20
    >>> detect_peaks(x, mph=0, mpd=20, show=True)
    >>> x = [0, 1, 0, 2, 0, 3, 0, 2, 0, 1, 0]
    >>> # set minimum peak distance = 2
    >>> detect_peaks(x, mpd=2, show=True)
    >>> x = np.sin(2*np.pi*5*np.linspace(0, 1, 200)) + np.random.randn(200)/5
    >>> # detection of valleys instead of peaks
    >>> detect_peaks(x, mph=0, mpd=20, valley=True, show=True)
    >>> x = [0, 1, 1, 0, 1, 1, 0]
    >>> # detect both edges
    >>> detect_peaks(x, edge='both', show=True)
    >>> x = [-2, 1, -2, 2, 1, 1, 3, 0]
    >>> # set threshold = 2
    >>> detect_peaks(x, threshold = 2, show=True)
    """

    x = np.atleast_1d(x).astype('float64')
    # Fewer than 3 samples cannot contain an interior peak.
    if x.size < 3:
        return np.array([], dtype=int)
    if valley:
        # Valleys are detected by negating the data and finding peaks.
        x = -x
    # find indices of all peaks via the first difference of the data
    dx = x[1:] - x[:-1]
    # handle NaN's: replace with +inf so they never register as peaks
    indnan = np.where(np.isnan(x))[0]
    if indnan.size:
        x[indnan] = np.inf
        dx[np.where(np.isnan(dx))[0]] = np.inf
    ine, ire, ife = np.array([[], [], []], dtype=int)
    if not edge:
        # Strict local maxima only (no flat-top peaks).
        ine = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) > 0))[0]
    else:
        # Flat-top peaks: keep the requested edge(s) of the plateau.
        if edge.lower() in ['rising', 'both']:
            ire = np.where((np.hstack((dx, 0)) <= 0) & (np.hstack((0, dx)) > 0))[0]
        if edge.lower() in ['falling', 'both']:
            ife = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0]
    ind = np.unique(np.hstack((ine, ire, ife)))
    # handle NaN's
    if ind.size and indnan.size:
        # NaN's and values close to NaN's cannot be peaks
        ind = ind[np.in1d(ind, np.unique(np.hstack((indnan, indnan-1, indnan+1))), invert=True)]
    # first and last values of x cannot be peaks
    if ind.size and ind[0] == 0:
        ind = ind[1:]
    if ind.size and ind[-1] == x.size-1:
        ind = ind[:-1]
    # remove peaks < minimum peak height
    if ind.size and mph is not None:
        ind = ind[x[ind] >= mph]
    # remove peaks whose prominence over both neighbors is below `threshold`
    if ind.size and threshold > 0:
        dx = np.min(np.vstack([x[ind]-x[ind-1], x[ind]-x[ind+1]]), axis=0)
        ind = np.delete(ind, np.where(dx < threshold)[0])
    # detect small peaks closer than minimum peak distance
    if ind.size and mpd > 1:
        ind = ind[np.argsort(x[ind])][::-1]  # sort ind by peak height
        idel = np.zeros(ind.size, dtype=bool)
        for i in range(ind.size):
            if not idel[i]:
                # keep peaks with the same height if kpsh is True
                idel = idel | (ind >= ind[i] - mpd) & (ind <= ind[i] + mpd) \
                    & (x[ind[i]] > x[ind] if kpsh else True)
                idel[i] = 0  # Keep current peak
        # remove the small peaks and sort back the indices by their occurrence
        ind = np.sort(ind[~idel])

    if show:
        # Restore the data (undo the NaN/valley transforms) before plotting.
        if indnan.size:
            x[indnan] = np.nan
        if valley:
            x = -x
        peak_plot(x, mph, mpd, threshold, edge, valley, ax, ind)

    return ind
def peak_plot(x, mph, mpd, threshold, edge, valley, ax, ind):
    """Plot results of the detect_peaks function, see its help.

    Parameters mirror detect_peaks; `ind` is the array of detected peak
    (or valley) indices. matplotlib is imported lazily so that
    detect_peaks itself carries no hard dependency on it.
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('matplotlib is not available.')
    else:
        if ax is None:
            _, ax = plt.subplots(1, 1, figsize=(8, 4))

        ax.plot(x, 'b', lw=1)
        if ind.size:
            # Mark each detected peak/valley with a red '+'.
            label = 'valley' if valley else 'peak'
            label = label + 's' if ind.size > 1 else label
            ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
                    label='%d %s' % (ind.size, label))
            ax.legend(loc='best', framealpha=.5, numpoints=1)
        ax.set_xlim(-.02*x.size, x.size*1.02-1)
        # Pad the y-limits by 10% of the finite data range (guard against zero range).
        ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
        yrange = ymax - ymin if ymax > ymin else 1
        ax.set_ylim(ymin - 0.1*yrange, ymax + 0.1*yrange)
        ax.set_xlabel('Data #', fontsize=14)
        ax.set_ylabel('Amplitude', fontsize=14)
        mode = 'Valley detection' if valley else 'Peak detection'
        ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
                     % (mode, str(mph), mpd, str(threshold), edge))
        # plt.grid()
        plt.show()

Wyświetl plik

@ -1,76 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Tools - GPS Ephemeris / Almanac Grabber
#
# 2017-04 Mark Jessop <vk5qi@rfhead.net>
#
import ftplib
import requests
import datetime
import logging
import os
def get_ephemeris(destination="ephemeris.dat"):
    ''' Download the latest GPS ephemeris file from the CDDIS FTP server.

    Args:
        destination (str): Output filename for the decompressed ephemeris data.

    Returns:
        str: `destination` on success, or None on any failure.
    '''
    try:
        logging.info("Connecting to GSFC FTP Server...")
        ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10)
        ftp.login("anonymous", "anonymous")
        ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y"))
        file_list = ftp.nlst()

        # We expect the latest files to be the last in the list.
        download_file = None
        file_suffix = datetime.datetime.utcnow().strftime("%yn.Z")
        if file_suffix in file_list[-1]:
            download_file = file_list[-1]
        elif file_suffix in file_list[-2]:
            download_file = file_list[-2]
        else:
            logging.error("Could not find appropriate ephemeris file.")
            return None

        logging.info("Downloading ephemeris data file: %s" % download_file)

        # Download the file.
        # BUG FIX: the file must be opened in binary mode ('wb', was 'w'),
        # since retrbinary supplies raw bytes; the with-block also ensures
        # the handle is closed if the transfer fails part-way.
        with open(destination + ".Z", 'wb') as f_eph:
            ftp.retrbinary("RETR %s" % download_file, f_eph.write)
        ftp.close()

        # Decompress the downloaded file in place.
        os.system("gunzip -q -f ./%s" % (destination + ".Z"))

        logging.info("Ephemeris downloaded to %s successfuly!" % destination)
        return destination

    except Exception:
        # Narrowed from a bare 'except:' so Ctrl-C / SystemExit propagate.
        logging.error("Could not download ephemeris file.")
        return None
def get_almanac(destination="almanac.txt", timeout=20):
    ''' Download the latest GPS almanac file from the US Coast Guard website.

    Args:
        destination (str): Output filename for the almanac data.
        timeout (int): HTTP request timeout, in seconds.

    Returns:
        str: `destination` on success, or None on any failure.
    '''
    try:
        _r = requests.get("https://www.navcen.uscg.gov/?pageName=currentAlmanac&format=sem", timeout=timeout)
        data = _r.text

        # Sanity-check that we actually received an almanac, not an error page.
        if "CURRENT.ALM" in data:
            # BUG FIX: `data` is a text string (requests .text), so the file
            # must be opened in text mode - the previous 'wb' raised a
            # TypeError on Python 3, which the old bare except silently
            # swallowed. Also use a with-block so the handle is always closed.
            with open(destination, 'w') as f:
                f.write(data)
            logging.info("Almanac downloaded to %s successfuly!" % destination)
            return destination
        else:
            logging.error("Downloaded file is not a GPS almanac.")
            return None
    except Exception:
        # Narrowed from a bare 'except:' so Ctrl-C / SystemExit propagate.
        logging.error("Failed to download almanac data")
        return None
if __name__ == "__main__":
    # Quick manual test: enable debug logging and attempt both downloads.
    logging.basicConfig(level=logging.DEBUG)
    get_almanac()
    get_ephemeris()

Wyświetl plik

@ -1,480 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Tools - Habitat Upload
#
# 2018-04 Mark Jessop <vk5qi@rfhead.net>
#
import crcmod
import datetime
import logging
import Queue
import random
import requests
import time
import traceback
import json
from base64 import b64encode
from hashlib import sha256
from threading import Thread
#
# Habitat Uploader Class
#
class HabitatUploader(object):
    '''
    Queued Habitat Telemetry Uploader class

    Packets to be uploaded to Habitat are added to a queue for uploading.
    If an upload attempt times out, the packet is discarded.
    If the queue fills up (probably indicating no network connection, and a fast packet downlink rate),
    it is immediately emptied, to avoid upload of out-of-date packets.
    '''

    def __init__(self, user_callsign='N0CALL',
            queue_size=16,
            upload_timeout = 10,
            upload_retries = 5,
            upload_retry_interval = 0.25,
            inhibit = False,
            ):
        ''' Create a Habitat Uploader object.

        Args:
            user_callsign (str): Receiver callsign the uploads are attributed to.
            queue_size (int): Maximum number of sentences held in the upload queue.
            upload_timeout (int): HTTP request timeout, in seconds.
            upload_retries (int): Retry count for upload conflicts (HTTP 409).
            upload_retry_interval (float): Base delay between retries, in seconds.
            inhibit (bool): If True, discard all sentences instead of uploading.
        '''

        self.user_callsign = user_callsign
        self.upload_timeout = upload_timeout
        self.upload_retries = upload_retries
        self.upload_retry_interval = upload_retry_interval
        self.queue_size = queue_size
        # Bounded queue of sentences awaiting upload.
        self.habitat_upload_queue = Queue.Queue(queue_size)
        self.inhibit = inhibit

        # Start the uploader thread.
        self.habitat_uploader_running = True
        self.uploadthread = Thread(target=self.habitat_upload_thread)
        self.uploadthread.start()

    def habitat_upload(self, sentence):
        ''' Upload a UKHAS-standard telemetry sentence to Habitat.

        Blocks until the sentence is uploaded, retries are exhausted, or a
        non-retryable error occurs. Failed sentences are discarded.
        '''

        # Generate payload to be uploaded
        _sentence_b64 = b64encode(sentence)
        _date = datetime.datetime.utcnow().isoformat("T") + "Z"
        _user_call = self.user_callsign

        _data = {
            "type": "payload_telemetry",
            "data": {
                "_raw": _sentence_b64
                },
            "receivers": {
                _user_call: {
                    "time_created": _date,
                    "time_uploaded": _date,
                    },
                },
        }

        # The URL to upload to. The document ID is the SHA256 hash of the
        # base64-encoded sentence, so all receivers of the same sentence
        # update the same document.
        _url = "http://habitat.habhub.org/habitat/_design/payload_telemetry/_update/add_listener/%s" % sha256(_sentence_b64).hexdigest()

        # Delay for a random amount of time between 0 and upload_retry_interval*2 seconds.
        # (De-synchronises multiple stations that heard the same sentence.)
        time.sleep(random.random()*self.upload_retry_interval*2.0)

        _retries = 0

        # When uploading, we have three possible outcomes:
        # - Can't connect. No point immediately re-trying in this situation.
        # - The packet is uploaded successfuly (201 / 403)
        # - There is a upload conflict on the Habitat DB end (409). We can retry and it might work.
        while _retries < self.upload_retries:
            # Run the request.
            try:
                _req = requests.put(_url, data=json.dumps(_data), timeout=self.upload_timeout)
            except Exception as e:
                logging.error("Habitat - Upload Failed: %s" % str(e))
                break

            if _req.status_code == 201 or _req.status_code == 403:
                # 201 = Success, 403 = Success, sentence has already seen by others.
                logging.info("Habitat - Uploaded sentence to Habitat successfully")
                _upload_success = True
                break
            elif _req.status_code == 409:
                # 409 = Upload conflict (server busy). Sleep for a moment, then retry.
                logging.debug("Habitat - Upload conflict.. retrying.")
                time.sleep(random.random()*self.upload_retry_interval)
                _retries += 1
            else:
                logging.error("Habitat - Error uploading to Habitat. Status Code: %d." % _req.status_code)
                break

        if _retries == self.upload_retries:
            logging.error("Habitat - Upload conflict not resolved with %d retries." % self.upload_retries)

        return

    def habitat_upload_thread(self):
        ''' Handle uploading of packets to Habitat.

        Runs until habitat_uploader_running is cleared (see close()).
        '''

        logging.info("Started Habitat Uploader Thread.")

        while self.habitat_uploader_running:

            if self.habitat_upload_queue.qsize() > 0:
                # If the queue is completely full, jump to the most recent telemetry sentence.
                if self.habitat_upload_queue.qsize() == self.queue_size:
                    while not self.habitat_upload_queue.empty():
                        sentence = self.habitat_upload_queue.get()

                    logging.warning("Habitat uploader queue was full - possible connectivity issue.")
                else:
                    # Otherwise, get the first item in the queue.
                    sentence = self.habitat_upload_queue.get()

                # Attempt to upload it.
                self.habitat_upload(sentence)

            else:
                # Wait for a short time before checking the queue again.
                time.sleep(0.1)

        logging.info("Stopped Habitat Uploader Thread.")

    def add(self, sentence):
        ''' Add a sentence to the upload queue.

        Normalises the leading '$$' delimiter and trailing newline before
        queueing. Does nothing if uploads are inhibited.
        '''

        if self.inhibit:
            # We have upload inhibited. Return.
            return

        # Handling of arbitrary numbers of $$'s at the start of a sentence:
        # Extract the data part of the sentence (i.e. everything after the $$'s')
        # NOTE(review): this assumes the payload data itself contains no '$'
        # characters - confirm against the upstream sentence format.
        sentence = sentence.split('$')[-1]
        # Now add the *correct* number of $$s back on.
        sentence = '$$' + sentence

        if not (sentence[-1] == '\n'):
            sentence += '\n'

        try:
            self.habitat_upload_queue.put_nowait(sentence)
        except Queue.Full:
            logging.error("Upload Queue is full, sentence discarded.")
        except Exception as e:
            logging.error("Error adding sentence to queue: %s" % str(e))

    def close(self):
        ''' Shutdown uploader thread. '''
        # Signal the upload thread's main loop to exit; it will finish its
        # current iteration (including any in-flight upload) first.
        self.habitat_uploader_running = False
#
# Functions for uploading telemetry to Habitat
#
# CRC16 function
def crc16_ccitt(data):
    """
    Calculate the CRC16 CCITT checksum of *data*.

    (CRC-16/CCITT-FALSE: initial value 0xFFFF, polynomial 0x1021, no reflection)

    Args:
        data (str or bytes): Data to checksum. Strings are encoded as ASCII,
            matching UKHAS sentence content.

    Returns:
        str: Upper-case, zero-padded 4-character hex string.
    """
    # Pure-Python implementation, removing the dependency on the third-party
    # 'crcmod' package. Produces output identical to
    # crcmod.predefined.mkCrcFun('crc-ccitt-false').
    if isinstance(data, str):
        data = data.encode('ascii')

    crc = 0xFFFF
    for byte in bytearray(data):
        crc ^= byte << 8
        for _ in range(8):
            if crc & 0x8000:
                crc = ((crc << 1) ^ 0x1021) & 0xFFFF
            else:
                crc = (crc << 1) & 0xFFFF

    return "%04X" % crc
def telemetry_to_sentence(sonde_data, payload_callsign="RADIOSONDE", comment=None):
    ''' Convert a telemetry data dictionary into a UKHAS-compliant telemetry sentence. '''
    # The RS decoders emit timestamps with trailing microseconds; Habitat only
    # wants HH:MM:SS.
    _dt = datetime.datetime.strptime(sonde_data['datetime_str'], "%Y-%m-%dT%H:%M:%S.%f")
    _time_str = _dt.strftime("%H:%M:%S")

    _fields = "$$%s,%d,%s,%.5f,%.5f,%d,%.1f,%.1f,%.1f" % (
        payload_callsign,
        sonde_data['frame'],
        _time_str,
        sonde_data['lat'],
        sonde_data['lon'],
        int(sonde_data['alt']),
        sonde_data['vel_h'],
        sonde_data['temp'],
        sonde_data['humidity'])

    # Add on a comment field if provided - note that this will result in a
    # different habitat payload doc being required. Commas would break the
    # sentence format, so they are replaced with underscores.
    if comment != None:
        _fields += "," + comment.replace(',', '_')

    # The CRC16 checksum is calculated over everything after the leading '$$'.
    return _fields + "*" + crc16_ccitt(_fields[2:]) + "\n"
def habitat_upload_payload_telemetry(uploader, telemetry, payload_callsign = "RADIOSONDE", callsign="N0CALL", comment=None):
    ''' Queue a packet of radiosonde telemetry for upload to Habitat. '''
    _sentence = telemetry_to_sentence(telemetry, payload_callsign=payload_callsign, comment=comment)

    try:
        uploader.add(_sentence)
    except Exception as e:
        logging.error("Could not add telemetry to Habitat Uploader - %s" % str(e))
#
# Functions for uploading a listener position to Habitat.
# from https://raw.githubusercontent.com/rossengeorgiev/hab-tools/master/spot2habitat_chase.py
#
# Flag indicating whether the listener callsign document has been created.
# (Declared here but not currently consulted in this module.)
callsign_init = False

# Habitat endpoint for requesting batches of CouchDB UUIDs.
url_habitat_uuids = "http://habitat.habhub.org/_uuids?count=%d"
# Base URL of the Habitat CouchDB database.
url_habitat_db = "http://habitat.habhub.org/habitat/"
# spacenear.us tracker query, used to check whether a callsign has already
# reported positions (see check_callsign()).
url_check_callsign = "http://spacenear.us/tracker/datanew.php?mode=6hours&type=positions&format=json&max_positions=10&position_id=0&vehicle=%s"

# Local cache of UUIDs fetched from Habitat, consumed as document IDs.
uuids = []
def check_callsign(callsign, timeout=10):
    '''
    Check if a payload document exists for a given callsign.

    This is done in a bit of a hack-ish way at the moment. We just check to see if there have
    been any reported packets for the payload callsign on the tracker.
    This should really be replaced with the correct call into the habitat tracker.

    Args:
        callsign (str): Payload callsign to check.
        timeout (int): HTTP request timeout, in seconds.

    Returns:
        bool: True if positions exist for the callsign (payload doc assumed
        present), False otherwise or on any error.
    '''
    global url_check_callsign

    try:
        # Perform the request.
        # BUG FIX: moved inside the try block, so network errors (timeouts,
        # DNS failures) are caught and treated as "no payload document",
        # instead of propagating to the caller.
        _r = requests.get(url_check_callsign % callsign, timeout=timeout)

        # Read the response in as JSON
        _r_json = _r.json()

        # Read out the list of positions for the requested callsign
        _positions = _r_json['positions']['position']

        # If there is at least one position returned, we assume there is a valid payload document.
        if len(_positions) > 0:
            logging.info("Callsign %s already present in Habitat DB, not creating new payload doc." % callsign)
            return True
        else:
            # Otherwise, we don't, and go create one.
            return False

    except Exception as e:
        # Handle errors with the request or with JSON parsing.
        logging.error("Unable to request payload positions from spacenear.us - %s" % str(e))
        return False
# Keep an internal cache for which payload docs we've created so we don't spam couchdb with updates.
# Maps payload serial -> either the serial itself (doc already existed), or the
# couchdb response for the document we created (see initPayloadDoc).
payload_config_cache = {}
def ISOStringNow():
    ''' Return the current UTC time as an ISO-8601 string with a trailing Z. '''
    return datetime.datetime.utcnow().isoformat() + "Z"
def initPayloadDoc(serial, description="Meteorology Radiosonde", frequency=401500000, timeout=20):
    """Creates a payload in Habitat for the radiosonde before uploading.

    Args:
        serial (str): Radiosonde serial number, used as the payload name and
            UKHAS callsign.
        description (str): Human-readable payload description.
        frequency (int): Transmit frequency in Hz (dummy/nominal value).
        timeout (int): HTTP request timeout, in seconds.

    Returns:
        The locally-cached entry if the serial is already in the cache,
        otherwise None. The cache is updated as a side effect on success.
    """
    global url_habitat_db
    global payload_config_cache

    # First, check if the payload's serial number is already in our local cache.
    if serial in payload_config_cache:
        return payload_config_cache[serial]

    # Next, check to see if the payload has been observed on the online tracker already.
    _callsign_present = check_callsign(serial)

    if _callsign_present:
        # Add the callsign to the local cache.
        payload_config_cache[serial] = serial
        return

    # Otherwise, proceed to creating a new payload document.
    payload_data = {
        "type": "payload_configuration",
        "name": serial,
        "time_created": ISOStringNow(),
        "metadata": {
            "description": description
        },
        # The transmission entry is a placeholder - actual data arrives via
        # radiosonde_auto_rx, not an RTTY downlink.
        "transmissions": [
            {
                "frequency": frequency, # Currently a dummy value.
                "modulation": "RTTY",
                "mode": "USB",
                "encoding": "ASCII-8",
                "parity": "none",
                "stop": 2,
                "shift": 350,
                "baud": 50,
                "description": "DUMMY ENTRY, DATA IS VIA radiosonde_auto_rx"
            }
        ],
        # Field layout must match the sentence built in telemetry_to_sentence().
        "sentences": [
            {
                "protocol": "UKHAS",
                "callsign": serial,
                "checksum":"crc16-ccitt",
                "fields":[
                    {
                        "name": "sentence_id",
                        "sensor": "base.ascii_int"
                    },
                    {
                        "name": "time",
                        "sensor": "stdtelem.time"
                    },
                    {
                        "name": "latitude",
                        "sensor": "stdtelem.coordinate",
                        "format": "dd.dddd"
                    },
                    {
                        "name": "longitude",
                        "sensor": "stdtelem.coordinate",
                        "format": "dd.dddd"
                    },
                    {
                        "name": "altitude",
                        "sensor": "base.ascii_int"
                    },
                    {
                        "name": "speed",
                        "sensor": "base.ascii_float"
                    },
                    {
                        "name": "temperature_external",
                        "sensor": "base.ascii_float"
                    },
                    {
                        "name": "humidity",
                        "sensor": "base.ascii_float"
                    },
                    {
                        "name": "comment",
                        "sensor": "base.string"
                    }
                ],
                "filters":
                {
                    "post": [
                        {
                            "filter": "common.invalid_location_zero",
                            "type": "normal"
                        }
                    ]
                },
                "description": "radiosonde_auto_rx to Habitat Bridge"
            }
        ]
    }

    # Perform the POST request to the Habitat DB.
    try:
        _r = requests.post(url_habitat_db, json=payload_data, timeout=timeout)

        if _r.json()['ok'] is True:
            logging.info("Habitat - Created a payload document for %s" % serial)
            # Cache the server response so we don't re-create this document.
            payload_config_cache[serial] = _r.json()
        else:
            logging.error("Habitat - Failed to create a payload document for %s" % serial)

    except Exception as e:
        logging.error("Habitat - Failed to create a payload document for %s - %s" % (serial, str(e)))
def postListenerData(doc, timeout=10):
    ''' POST a listener document to the Habitat DB.

    Assigns the document a fresh UUID and upload timestamp before posting.
    Returns True on success, False otherwise.
    '''
    global uuids, url_habitat_db

    # Make sure we have at least one UUID available for the document ID;
    # fetch a new batch if the local pool is empty.
    if len(uuids) < 1:
        fetchUuids()

    try:
        doc['_id'] = uuids.pop()
    except IndexError:
        logging.error("Habitat - Unable to post listener data - no UUIDs available.")
        return False

    doc['time_uploaded'] = ISOStringNow()

    try:
        requests.post(url_habitat_db, json=doc, timeout=timeout)
        return True
    except Exception as e:
        logging.error("Habitat - Could not post listener data - %s" % str(e))
        return False
def fetchUuids(timeout=10):
    ''' Fetch a batch of UUIDs from Habitat into the local pool.

    Retries up to 5 times, waiting 10 seconds between attempts.

    Args:
        timeout (int): HTTP request timeout, in seconds.
    '''
    global uuids, url_habitat_uuids

    _retries = 5
    while _retries > 0:
        try:
            _r = requests.get(url_habitat_uuids % 10, timeout=timeout)
            uuids.extend(_r.json()['uuids'])
            logging.debug("Habitat - Got UUIDs")
            return
        except Exception as e:
            _retries = _retries - 1
            if _retries == 0:
                # BUG FIX: previously we logged "retrying" and slept a full
                # 10 seconds even when no attempts remained.
                break
            logging.error("Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e))
            time.sleep(10)

    logging.error("Habitat - Gave up trying to get UUIDs.")
    return
def initListenerCallsign(callsign, version=''):
    ''' Push a listener_information document for the given callsign to Habitat.

    Returns True if the document was posted successfully, False otherwise.
    '''
    _doc = {
        'type': 'listener_information',
        'time_created' : ISOStringNow(),
        'data': {
            'callsign': callsign,
            'antenna': '',
            'radio': 'radiosonde_auto_rx %s' % version,
            }
        }

    if postListenerData(_doc) is True:
        logging.debug("Habitat - Listener Callsign Initialized.")
        return True

    logging.error("Habitat - Unable to initialize callsign.")
    return False
def uploadListenerPosition(callsign, lat, lon, version=''):
    """ Initializer Listener Callsign, and upload Listener Position """
    # Attempt to initialize the listener's callsign document first.
    # If this fails, it means we can't contact the Habitat server,
    # so there is no point continuing.
    if initListenerCallsign(callsign, version=version) is False:
        return

    _doc = {
        'type': 'listener_telemetry',
        'time_created': ISOStringNow(),
        'data': {
            'callsign': callsign,
            'chase': False,
            'latitude': lat,
            'longitude': lon,
            'altitude': 0,
            'speed': 0,
            }
        }

    # Post the listener's position to Habitat.
    if postListenerData(_doc) is True:
        logging.info("Habitat - Listener information uploaded.")
    else:
        logging.error("Habitat - Unable to upload listener information.")

Wyświetl plik

@ -1,133 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Tools
# Process last_position.txt and determine effective radio horizon
#
# 2017-05 Mark Jessop <vk5qi@rfhead.net>
#
from math import radians, degrees, sin, cos, atan2, sqrt, pi
import sys
import numpy as np
import matplotlib.pyplot as plt
# SET YOUR LOCATION HERE.
# Receiver station location: latitude/longitude in decimal degrees,
# altitude in metres (passed to position_info as the 'listener' tuple).
my_lat = 0.0
my_lon = 0.0
my_alt = 0.0
# Earthmaths code by Daniel Richman (thanks!)
# Copyright 2012 (C) Daniel Richman; GNU GPL 3
def position_info(listener, balloon):
    """
    Calculate and return information from 2 (lat, lon, alt) tuples

    Returns a dict with:

    - angle at centre
    - great circle distance
    - distance in a straight line
    - bearing (azimuth or initial course)
    - elevation (altitude)

    Input and output latitudes, longitudes, angles, bearings and elevations are
    in degrees, and input altitudes and output distances are in meters.
    """

    # Earth:
    #radius = 6371000.0
    radius = 6364963.0 # Optimized for Australia :-)

    (lat1, lon1, alt1) = listener
    (lat2, lon2, alt2) = balloon

    # All trig below works in radians.
    lat1 = radians(lat1)
    lat2 = radians(lat2)
    lon1 = radians(lon1)
    lon2 = radians(lon2)

    # Calculate the bearing, the angle at the centre, and the great circle
    # distance using Vincenty's_formulae with f = 0 (a sphere). See
    # http://en.wikipedia.org/wiki/Great_circle_distance#Formulas and
    # http://en.wikipedia.org/wiki/Great-circle_navigation and
    # http://en.wikipedia.org/wiki/Vincenty%27s_formulae
    d_lon = lon2 - lon1
    sa = cos(lat2) * sin(d_lon)
    sb = (cos(lat1) * sin(lat2)) - (sin(lat1) * cos(lat2) * cos(d_lon))
    bearing = atan2(sa, sb)
    aa = sqrt((sa ** 2) + (sb ** 2))
    ab = (sin(lat1) * sin(lat2)) + (cos(lat1) * cos(lat2) * cos(d_lon))
    angle_at_centre = atan2(aa, ab)
    great_circle_distance = angle_at_centre * radius

    # Armed with the angle at the centre, calculating the remaining items
    # is a simple 2D triangley circley problem:

    # Use the triangle with sides (r + alt1), (r + alt2), distance in a
    # straight line. The angle between (r + alt1) and (r + alt2) is the
    # angle at the centre. The angle between distance in a straight line and
    # (r + alt1) is the elevation plus pi/2.

    # Use sum of angle in a triangle to express the third angle in terms
    # of the other two. Use sine rule on sides (r + alt1) and (r + alt2),
    # expand with compound angle formulae and solve for tan elevation by
    # dividing both sides by cos elevation
    ta = radius + alt1
    tb = radius + alt2
    ea = (cos(angle_at_centre) * tb) - ta
    eb = sin(angle_at_centre) * tb
    elevation = atan2(ea, eb)

    # Use cosine rule to find unknown side.
    distance = sqrt((ta ** 2) + (tb ** 2) - 2 * tb * ta * cos(angle_at_centre))

    # Give a bearing in range 0 <= b < 2pi
    if bearing < 0:
        bearing += 2 * pi

    return {
        "listener": listener, "balloon": balloon,
        "listener_radians": (lat1, lon1, alt1),
        "balloon_radians": (lat2, lon2, alt2),
        "angle_at_centre": degrees(angle_at_centre),
        "angle_at_centre_radians": angle_at_centre,
        "bearing": degrees(bearing),
        "bearing_radians": bearing,
        "great_circle_distance": great_circle_distance,
        "straight_distance": distance,
        "elevation": degrees(elevation),
        "elevation_radians": elevation
    }
if __name__ == '__main__':
    # Read in last_position.txt line by line.
    # NOTE(review): the comment above says 'last_position.txt', but the code
    # opens 'last_positions.txt' (plural) - confirm which filename is correct.
    f = open('last_positions.txt','r')

    azimuths = []
    elevations = []
    slant_ranges = []

    for line in f:
        if 'Last Position:' in line:
            try:
                # Parse lat/lon/alt out of the comma-separated position text.
                # Malformed lines are silently skipped by the bare except below.
                last_lat = float(line.split(',')[0].split(' ')[2])
                last_lon = float(line.split(',')[1])
                last_alt = float(line.split(',')[2].split(' ')[1])
                pos_data = position_info( (my_lat, my_lon, my_alt), (last_lat, last_lon, last_alt))
                azimuths.append(pos_data['bearing'])
                elevations.append(pos_data['elevation'])
                slant_ranges.append(pos_data['straight_distance'])
            except:
                pass
    f.close()

    # Scatter-plot bearing vs elevation of every received position, to show
    # the station's effective radio horizon by direction.
    plt.scatter(azimuths, elevations)
    plt.xlabel('Bearing (degrees)')
    plt.ylabel('Elevation (degrees)')
    plt.show()

Wyświetl plik

@ -1,113 +0,0 @@
# OziPlotter push utils for Sonde auto RX.
import socket
import json
# Network Settings
HORUS_UDP_PORT = 55672
HORUS_OZIPLOTTER_PORT = 8942
def send_payload_summary(callsign, latitude, longitude, altitude, packet_time, speed=-1, heading=-1, comment= '', model='', freq=401.0, temp=-273, udp_port = HORUS_UDP_PORT):
    """
    Send an update on the core payload telemetry statistics into the network via UDP broadcast.
    This can be used by other devices hanging off the network to display vital stats about the payload.

    Args:
        callsign (str): Payload callsign / sonde serial number.
        latitude, longitude (float): Position, decimal degrees.
        altitude (float): Altitude, metres.
        packet_time (str): Time of the telemetry frame.
        speed (float): Ground speed, km/h. -1 = unknown.
        heading (float): Course, degrees. -1 = unknown.
        comment (str): Optional free-text comment.
        model (str): Radiosonde model string.
        freq (float): Receive frequency, MHz.
        temp (float): Sonde-reported temperature, deg C. -273 = unknown.
        udp_port (int): UDP port to broadcast the summary packet on.
    """
    packet = {
        'type' : 'PAYLOAD_SUMMARY',
        'callsign' : callsign,
        'latitude' : latitude,
        'longitude' : longitude,
        'altitude' : altitude,
        'speed' : speed,
        'heading': heading,
        'time' : packet_time,
        'comment' : comment,
        # Additional fields specifically for radiosondes
        'model': model,
        'freq': freq,
        'temp': temp
    }

    # Set up our UDP socket
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.settimeout(1)
    # Set up socket for broadcast, and allow re-use of the address
    s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        # SO_REUSEPORT is not available on all platforms (e.g. Windows, older Linux).
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    except (AttributeError, socket.error):
        pass
    s.bind(('', HORUS_UDP_PORT))
    try:
        # Encode to bytes so sendto works under both Python 2 and Python 3.
        _payload = json.dumps(packet).encode('ascii')
        try:
            s.sendto(_payload, ('<broadcast>', udp_port))
        except socket.error:
            # Some network setups disallow broadcast; fall back to loopback.
            s.sendto(_payload, ('127.0.0.1', udp_port))
    finally:
        # Always release the socket (the original implementation leaked it).
        s.close()
# The new 'generic' OziPlotter upload function, with no callsign, or checksumming (why bother, really)
def oziplotter_upload_basic_telemetry(time, latitude, longitude, altitude, hostname="192.168.88.2", udp_port = HORUS_OZIPLOTTER_PORT, broadcast=True):
    """
    Send a sentence of position data to Oziplotter, via UDP.

    Args:
        time (str): Time field, i.e. HH:MM:SS.
        latitude, longitude (float): Position, decimal degrees.
        altitude (float): Altitude, metres.
        hostname (str): Destination host, used only when broadcast=False.
        udp_port (int): Destination UDP port.
        broadcast (bool): If True, broadcast the sentence; otherwise unicast to hostname.

    Returns:
        str: The sentence that was sent, or None on failure.
    """
    sentence = "TELEMETRY,%s,%.5f,%.5f,%d\n" % (time, latitude, longitude, altitude)

    ozisock = None
    try:
        ozisock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Encode to bytes so sendto works under both Python 2 and Python 3.
        _data = sentence.encode('ascii')

        if broadcast:
            # Set up socket for broadcast, and allow re-use of the address
            ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                # SO_REUSEPORT is not available on all platforms.
                ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except (AttributeError, socket.error):
                pass
            ozisock.sendto(_data, ('<broadcast>', udp_port))
        else:
            # Otherwise, send to a user-defined hostname/port.
            ozisock.sendto(_data, (hostname, udp_port))

        return sentence
    except Exception as e:
        print("Failed to send to Ozi: %s" % str(e))
    finally:
        # Ensure the socket is closed even on error (original leaked it if sendto raised).
        if ozisock is not None:
            ozisock.close()
def push_telemetry_to_ozi(telemetry, hostname='127.0.0.1', udp_port = HORUS_OZIPLOTTER_PORT):
    """
    Grab the relevant fields from the incoming telemetry dictionary, and pass onto oziplotter.
    """
    # Pull out just the fields OziPlotter needs, then hand them off.
    _time = telemetry['short_time']
    _lat = telemetry['lat']
    _lon = telemetry['lon']
    _alt = telemetry['alt']
    oziplotter_upload_basic_telemetry(_time, _lat, _lon, _alt, hostname=hostname, udp_port=udp_port)
def push_payload_summary(telemetry, udp_port = HORUS_UDP_PORT):
    """
    Extract the needed data from a telemetry dictionary, and send out a payload summary packet.

    Args:
        telemetry (dict): Decoded telemetry dictionary. Must contain 'id', 'lat',
            'lon', 'alt', 'short_time', 'type', 'freq' and 'temp'; may optionally
            contain 'heading' (degrees) and 'vel_h' (m/s).
        udp_port (int): UDP port to broadcast the payload summary on.
    """
    # Heading & horizontal velocity are not provided by all sonde decoders;
    # default to -1 ('unknown') when absent. vel_h is m/s -> convert to km/h.
    _heading = telemetry.get('heading', -1)
    _vel_h = telemetry.get('vel_h', None)
    _speed = _vel_h * 3.6 if _vel_h is not None else -1

    send_payload_summary(telemetry['id'],
        telemetry['lat'],
        telemetry['lon'],
        telemetry['alt'],
        telemetry['short_time'],
        heading=_heading,
        speed=_speed,
        model=telemetry['type'],
        freq=telemetry['freq'],
        temp=telemetry['temp'],
        comment='radiosonde',
        udp_port=udp_port)

Wyświetl plik

@ -1,146 +0,0 @@
#!/usr/bin/env python
#
# Radiosonde Auto RX Tools - Rotator Control
#
# 2017-12 Mark Jessop <vk5qi@rfhead.net>
#
import socket
import logging
import traceback
import time
import numpy as np
from math import radians, degrees, sin, cos, atan2, sqrt, pi
# Earthmaths code by Daniel Richman (thanks!)
# Copyright 2012 (C) Daniel Richman; GNU GPL 3
def position_info(listener, balloon):
    """
    Calculate and return information from 2 (lat, lon, alt) tuples

    Returns a dict with:

     - angle at centre
     - great circle distance
     - distance in a straight line
     - bearing (azimuth or initial course)
     - elevation (altitude)

    Input and output latitudes, longitudes, angles, bearings and elevations are
    in degrees, and input altitudes and output distances are in meters.
    """
    # Mean earth radius (spherical model), metres.
    earth_radius = 6371000.0

    lis_lat, lis_lon, lis_alt = listener
    bal_lat, bal_lon, bal_alt = balloon

    # Work in radians from here on.
    lat1 = radians(lis_lat)
    lon1 = radians(lis_lon)
    lat2 = radians(bal_lat)
    lon2 = radians(bal_lon)

    # Bearing, angle at the centre, and great-circle distance via
    # Vincenty's formulae with f = 0 (i.e. a sphere). See:
    # http://en.wikipedia.org/wiki/Great_circle_distance#Formulas
    # http://en.wikipedia.org/wiki/Great-circle_navigation
    # http://en.wikipedia.org/wiki/Vincenty%27s_formulae
    d_lon = lon2 - lon1
    sin_term = cos(lat2) * sin(d_lon)
    cos_term = (cos(lat1) * sin(lat2)) - (sin(lat1) * cos(lat2) * cos(d_lon))
    bearing = atan2(sin_term, cos_term)

    centre_num = sqrt((sin_term ** 2) + (cos_term ** 2))
    centre_den = (sin(lat1) * sin(lat2)) + (cos(lat1) * cos(lat2) * cos(d_lon))
    angle_at_centre = atan2(centre_num, centre_den)
    great_circle_distance = angle_at_centre * earth_radius

    # The remaining quantities come from the 2D triangle with sides
    # (r + alt1) and (r + alt2) separated by the angle at the centre.
    # The sine rule, expanded with compound-angle formulae and divided
    # through by cos(elevation), gives tan(elevation); the cosine rule
    # gives the straight-line distance.
    side_a = earth_radius + lis_alt
    side_b = earth_radius + bal_alt
    elevation = atan2((cos(angle_at_centre) * side_b) - side_a,
                      sin(angle_at_centre) * side_b)
    distance = sqrt((side_a ** 2) + (side_b ** 2)
                    - 2 * side_a * side_b * cos(angle_at_centre))

    # Normalise the bearing into the range 0 <= b < 2*pi.
    if bearing < 0:
        bearing += 2 * pi

    return {
        "listener": listener, "balloon": balloon,
        "listener_radians": (lat1, lon1, lis_alt),
        "balloon_radians": (lat2, lon2, bal_alt),
        "angle_at_centre": degrees(angle_at_centre),
        "angle_at_centre_radians": angle_at_centre,
        "bearing": degrees(bearing),
        "bearing_radians": bearing,
        "great_circle_distance": great_circle_distance,
        "straight_distance": distance,
        "elevation": degrees(elevation),
        "elevation_radians": elevation
    }
def update_rotctld(hostname='127.0.0.1', port=4533, azimuth=0.0, elevation=0.0):
    '''
    Attempt to push an azimuth & elevation position command into rotctld.

    We take a fairly simplistic approach to this, and don't attempt to read the current
    rotator position.

    Args:
        hostname (str): Host running rotctld.
        port (int): rotctld TCP port (default 4533).
        azimuth (float): Target azimuth in degrees; wrapped into 0-360.
        elevation (float): Target elevation in degrees; clipped to 0-90.

    Returns:
        bool: True if rotctld acknowledged the command (RPRT 0), False otherwise.
    '''
    # Bound Azimuth & Elevation to 0-360 / 0-90
    elevation = np.clip(elevation, 0, 90)
    azimuth = azimuth % 360.0

    try:
        # Connect to rotctld.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(1)
        s.connect((hostname, port))
        try:
            # Produce rotctld set-position command.
            msg = "P %.1f %.1f\n" % (azimuth, elevation)
            logging.debug("Rotctld - Sending command: %s" % msg)
            # Encode/decode explicitly so this works under Python 3 as well as
            # Python 2 (send requires bytes, recv returns bytes).
            s.send(msg.encode('ascii'))
            # Listen for a response.
            resp = s.recv(1024).decode('ascii', 'ignore')
        finally:
            # Close the socket even if send/recv raised.
            s.close()

        if 'RPRT 0' in resp:
            logging.info("Rotctld - Commanded rotator to %.1f, %.1f." % (azimuth, elevation))
            return True
        elif 'RPRT -1' in resp:
            logging.warning("Rotctld - rotctld reported an error (RPRT -1).")
            return False
        else:
            logging.warning("Rotctld - Unknown or no response from rotctld.")
            return False
    except Exception:
        logging.error("Rotctld - Connection Error: %s" % traceback.format_exc())
        # Original implementation fell through and returned None here; be explicit.
        return False
if __name__ == "__main__":
# Test script, to poke some values into rotctld.
logging.basicConfig(level=logging.DEBUG)
az_range = np.linspace(0,360,10)
el_range = np.linspace(0,90,10)
for i in range(0,len(az_range)):
update_rotctld(azimuth=az_range[i], elevation=el_range[i])
time.sleep(10)

Wyświetl plik

@ -1,5 +1,5 @@
#
# Radiosonde Auto RX Station Configuration File
# Radiosonde Auto RX v2 Station Configuration File
#
# Copy this file to station.cfg and modify as required.
#
@ -15,43 +15,51 @@ per_sonde_log = True
sdr_fm_path = rtl_fm
sdr_power_path = rtl_power
# PPM Frequency Correction (ppm offset)
# Refer here for a method of determining this correction: https://gist.github.com/darksidelemm/b517e6a9b821c50c170f1b9b7d65b824
sdr_ppm = 0
# Number of RTLSDRs to use.
# If more than one RTLSDR is in use, multiple [sdr_X] sections must be populated below
sdr_quantity = 1
# Individual SDR Settings.
[sdr_1]
# Device Index / Serial
# If using a single RTLSDR, set this value to 0
# If using multiple SDRs, you MUST allocate each SDR a unique serial number using rtl_eeprom
# i.e. to set the serial number of a (single) connected RTLSDR: rtl_eeprom -s 00000002
# Then set the device_idx below to 00000002, and repeat for the other [sdr_n] sections below
device_idx = 0
# Frequency Correction (ppm offset)
# Refer here for a method of determining this correction: https://gist.github.com/darksidelemm/b517e6a9b821c50c170f1b9b7d65b824
ppm = 0
# SDR Gain Setting
# Gain settings can generally range between 0dB and 30dB depending on the tuner in use.
# Gain settings can generally range between 0dB and 40dB depending on the tuner in use.
# Run rtl_test to confirm what gain settings are available, or use a value of -1 to use automatic gain control.
# Note that this is an overall gain value, not an individual mixer/tuner gain. This is a limitation of the rtl_power/rtl_fm utils.
sdr_gain = -1
# Enable RTLSDR Bias Tee (for v3 Dongles)
# Requires a recent version of rtl-sdr to be installed (needs the -T option)
sdr_bias = False
gain = -1
# Bias Tee - Enable the bias tee in the RTLSDR v3 Dongles.
bias = False
[sdr_2]
# As above, for the next SDR. Note the warning about serial numbers.
device_idx = 00000002
ppm = 0
gain = -1
bias = False
# Add more SDR definitions here if needed.
# Radiosonde Search Parameters
[search_params]
# Number of times to scan before quitting (Deprecated?)
search_attempts = 10
# Wait time between failed scans.
search_delay = 10
# Minimum and maximum search frequencies, in MHz.
# Australia: Use 400.05 - 403 MHz
# Europe: Use 400.05 - 406 MHz
min_freq = 400.05
max_freq = 403.0
# Receive bin width (Hz)
search_step = 800
# Minimum SNR for a peak to be detected. The lower the number, the more peaks detected.
min_snr = 10
# Minimum distance between peaks (Hz)
min_distance = 1000
# Dwell time - How long to wait for a sonde detection on each peak.
dwell_time = 5
# Quantize search results to x Hz steps. Useful as most sondes are on 10 kHz frequency steps.
quantization = 10000
# Timeout and re-scan after X seconds of no data.
rx_timeout = 120
# Have the decoder timeout after X seconds of no valid data.
rx_timeout = 180
# Frequency Lists - These must be provided as JSON-compatible lists of floats (in MHz), i.e. [400.50, 401.520, 403.200]
@ -67,50 +75,6 @@ blacklist = []
# This is useful when you know the regular frequency of a local sonde, but still want to allow detections on other frequencies.
greylist = []
# Station Location (optional). Used by the Habitat Uploader, and by Rotator Control
[location]
station_lat = 0.0
station_lon = 0.0
station_alt = 0.0
# Upload settings. Used by both the internet upload threads, and the rotator updater.
[upload]
# Upload/update every x seconds
upload_rate = 30
# Enable upload to various services.
# Uploading to APRS. Change settings in [aprs] block below.
# PLEASE READ WARNING HERE BEFORE ENABLING: https://github.com/projecthorus/radiosonde_auto_rx/wiki/Configuration-Settings#uploading-to-aprs-is
enable_aprs = False
# Uploading to Habitat. PLEASE CHANGE uploader_callsign IN [habitat] BLOCK BELOW BEFORE ENABLING THIS
enable_habitat = False
# Upload when (seconds_since_utc_epoch%upload_rate) == 0. Otherwise just delay upload_rate seconds between uploads.
# Setting this to True with multiple uploaders should give a higher chance of all uploaders uploading the same frame,
# however the upload_rate should not be set too low, else there may be a chance of missing upload slots.
synchronous_upload = True
# Settings for uploading to APRS-IS
[aprs]
# APRS-IS Login Information
aprs_user = N0CALL
aprs_pass = 00000
# APRS-IS server to upload to.
aprs_server = rotate.aprs2.net
# Object name to be used when uploading to APRS-IS (Max 9 chars)
# Should be either a callsign with a -11 or -12 suffix (i.e. N0CALL-12),
# or <id>, which will be replaced with the radiosonde's serial number
aprs_object_id = <id>
# The APRS-IS beacon comment. The following fields can be included:
# <freq> - Sonde Frequency, i.e. 401.520 MHz
# <type> - Sonde Type (RS94/RS41)
# <id> - Sonde Serial Number (i.e. M1234567)
# <vel_v> - Sonde Vertical Velocity (i.e. -5.1m/s)
# <temp> - Sonde reported temperature. If no temp data available, this will report -273 degC. Only works for RS41s.
aprs_custom_comment = Radiosonde Auto-RX <freq>
# Settings for uploading to the Habitat HAB tracking database ( https://tracker.habhub.org/ )
@ -120,21 +84,67 @@ aprs_custom_comment = Radiosonde Auto-RX <freq>
# If you use a custom payload callsign, you will need to create an appropriate payload document for it to appear on the map
#
[habitat]
# Payload callsign - if set to <id> will use the serial number of the sonde and create a payload document automatically
payload_callsign = <id>
payload_description = Meteorological Radiosonde
habitat_enabled = False
# Uploader callsign, as shown above. PLEASE CHANGE THIS TO SOMETHING UNIQUE.
uploader_callsign = SONDE_AUTO_RX
uploader_callsign = CHANGEME_AUTO_RX
# Upload listener position to Habitat? (So you show up on the map)
upload_listener_position = False
upload_listener_position = True
# Habitat Upload Rate - Upload a packet every X seconds.
upload_rate = 30
# Payload callsign - if set to <id> will use the serial number of the sonde and create a payload document automatically
# WARNING - If running multiple RTLSDRs, setting this to a fixed callsign will result in odd behaviour on the online tracker.
# DO NOT SET THIS TO ANYTHING OTHER THAN <id> IF YOU ARE USING MORE THAN ONE SDR!
payload_callsign = <id>
# Station Location (optional). Used by the Habitat Uploader, and by Rotator Control
[location]
station_lat = 0.0
station_lon = 0.0
station_alt = 0.0
# Settings for uploading to APRS-IS
[aprs]
aprs_enabled = False
# APRS-IS Login Information
aprs_user = N0CALL
aprs_pass = 00000
# APRS Upload Rate - Upload a packet every X seconds.
upload_rate = 30
# APRS-IS server to upload to.
aprs_server = rotate.aprs2.net
# Object name to be used when uploading to APRS-IS (Max 9 chars)
# Should be either a callsign with a -11 or -12 suffix (i.e. N0CALL-12),
# or <id>, which will be replaced with the radiosonde's serial number
# WARNING - If running multiple RTLSDRs, setting this to a fixed callsign will result in odd behaviour on the online tracker.
# DO NOT SET THIS TO ANYTHING OTHER THAN <id> IF YOU ARE USING MORE THAN ONE SDR!
aprs_object_id = <id>
# The APRS-IS beacon comment. The following fields can be included:
# <freq> - Sonde Frequency, i.e. 401.520 MHz
# <type> - Sonde Type (RS94/RS41)
# <id> - Sonde Serial Number (i.e. M1234567)
# <vel_v> - Sonde Vertical Velocity (i.e. -5.1m/s)
# <temp> - Sonde reported temperature. If no temp data available, this will report -273 degC. Only works for RS41s.
aprs_custom_comment = <type> Radiosonde <freq> <vel_v> http://bit.ly/2Bj4Sfk
# Settings for pushing data into OziPlotter
# Oziplotter receives data via a basic CSV format, via UDP.
[oziplotter]
# WARNING - This should not be enabled in a multi-SDR configuration, as OziExplorer currently has no way of differentiating
# between sonde IDs.
ozi_enabled = False
ozi_update_rate = 5
# Broadcast UDP port - Set this to 55681 if sending data to OziMux, or 8942 if sending directly to OziPlotter
# UDP Broadcast output port.
ozi_port = 55681
# Payload summary output, which can be used by a few of the Horus Ground Station tools
payload_summary_enabled = False
@ -151,19 +161,43 @@ max_altitude = 50000
# Discard positions more than 1000 km from the observation station location (if set)
max_radius_km = 1000
# MQTT (Even more interfacing options!)
# Post all sonde messages to a MQTT server
[mqtt]
mqtt_enabled = False
mqtt_hostname = 127.0.0.1
mqtt_port = 1883
# Advanced Settings
# These control low-level settings within various modules.
# Playing with them may result in odd behaviour.
[advanced]
# Scanner - Receive bin width (Hz)
search_step = 800
# Scanner - Minimum SNR for a peak to be detected. The lower the number, the more peaks detected.
snr_threshold = 10
# Scanner - Maximum number of peaks to search through during a scan pass.
# Increase this if you have lots of spurious signals, though this will increase scan times.
max_peaks = 10
# Scanner - Minimum distance between peaks (Hz)
min_distance = 1000
# Scanner - Scan Dwell Time - How long to observe the specified spectrum for.
scan_dwell_time = 20
# Scanner - Detection Dwell time - How long to wait for a sonde detection on each peak.
detect_dwell_time = 5
# Scanner - Delay between scans. We should delay a short amount between scans to allow for decoders and other actions to jump in.
scan_delay = 10
# Quantize search results to x Hz steps. Useful as most sondes are on 10 kHz frequency steps.
quantization = 10000
# Upload when (seconds_since_utc_epoch%upload_rate) == 0. Otherwise just delay upload_rate seconds between uploads.
# Setting this to True with multiple uploaders should give a higher chance of all uploaders uploading the same frame,
# however the upload_rate should not be set too low, else there may be a chance of missing upload slots.
synchronous_upload = True
# Only accept a payload ID as valid until it has been seen N times.
# This helps avoid corrupted callsigns getting onto the map.
payload_id_valid = 5
# Rotator Settings
# auto_rx can communicate with an instance of rotctld, on either the local machine or elsewhere on the network.
# The update rate is tied to the upload_rate setting above, though internet upload does not need to be enabled
# for the rotator to be updated.
[rotator]
enable_rotator = False
# WARNING - This should not be enabled in a multi-SDR configuration.
rotator_enabled = False
# How often to update the rotator position.
update_rate = 10
# Hostname / Port of the rotctld instance.
rotator_hostname = 127.0.0.1
rotator_port = 4533