diff --git a/auto_rx/aprs_utils.py b/auto_rx/aprs_utils.py deleted file mode 100644 index b2130e5..0000000 --- a/auto_rx/aprs_utils.py +++ /dev/null @@ -1,67 +0,0 @@ -# APRS push utils for Sonde auto RX. - -from socket import * - - -# Push a Radiosonde data packet to APRS as an object. -def push_balloon_to_aprs(sonde_data, object_name="", aprs_comment="BOM Balloon", aprsUser="N0CALL", aprsPass="00000", serverHost = 'rotate.aprs2.net', serverPort = 14580): - if object_name == "": - object_name = sonde_data["id"].strip() - - # Pad or limit the object name to 9 characters. - if len(object_name) > 9: - object_name = object_name[:9] - elif len(object_name) < 9: - object_name = object_name + " "*(9-len(object_name)) - - # Convert float latitude to APRS format (DDMM.MM) - lat = float(sonde_data["lat"]) - lat_degree = abs(int(lat)) - lat_minute = abs(lat - int(lat)) * 60.0 - lat_min_str = ("%02.2f" % lat_minute).zfill(5) - lat_dir = "S" - if lat>0.0: - lat_dir = "N" - lat_str = "%02d%s" % (lat_degree,lat_min_str) + lat_dir - - # Convert float longitude to APRS format (DDDMM.MM) - lon = float(sonde_data["lon"]) - lon_degree = abs(int(lon)) - lon_minute = abs(lon - int(lon)) * 60.0 - lon_min_str = ("%02.2f" % lon_minute).zfill(5) - lon_dir = "E" - if lon<0.0: - lon_dir = "W" - lon_str = "%03d%s" % (lon_degree,lon_min_str) + lon_dir - - # Convert Alt (in metres) to feet - alt = int(float(sonde_data["alt"])/0.3048) - - # TODO: Process velocity/heading, if supplied. - - # TODO: Limit comment length. - - # Produce the APRS object string. - - if ('heading' in sonde_data.keys()) and ('vel_h' in sonde_data.keys()): - course_speed = "%03d/%03d" % (int(sonde_data['heading']), int(sonde_data['vel_h']*1.944)) - else: - course_speed = "000/000" - - out_str = ";%s*111111z%s/%sO%s/A=%06d %s" % (object_name,lat_str,lon_str,course_speed,alt,aprs_comment) - - # Connect to an APRS-IS server, login, then push our object position in. 
- - # create socket & connect to server - sSock = socket(AF_INET, SOCK_STREAM) - sSock.connect((serverHost, serverPort)) - # logon - sSock.send('user %s pass %s vers VK5QI-Python 0.01\n' % (aprsUser, aprsPass) ) - # send packet - sSock.send('%s>APRS:%s\n' % (aprsUser, out_str) ) - - # close socket - sSock.shutdown(0) - sSock.close() - - return out_str \ No newline at end of file diff --git a/auto_rx/async_file_reader.py b/auto_rx/async_file_reader.py deleted file mode 100644 index 271f630..0000000 --- a/auto_rx/async_file_reader.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -AsynchronousFileReader -====================== -Simple thread based asynchronous file reader for Python. -see https://github.com/soxofaan/asynchronousfilereader -MIT License -Copyright (c) 2014 Stefaan Lippens -""" - -__version__ = '0.2.1' - -import threading -try: - # Python 2 - from Queue import Queue -except ImportError: - # Python 3 - from queue import Queue - - -class AsynchronousFileReader(threading.Thread): - """ - Helper class to implement asynchronous reading of a file - in a separate thread. Pushes read lines on a queue to - be consumed in another thread. - """ - - def __init__(self, fd, queue=None, autostart=True): - self._fd = fd - if queue is None: - queue = Queue() - self.queue = queue - self.running = True - - threading.Thread.__init__(self) - - if autostart: - self.start() - - def run(self): - """ - The body of the tread: read lines and put them on the queue. - """ - while self.running: - line = self._fd.readline() - if not line: - break - self.queue.put(line) - - def eof(self): - """ - Check whether there is no more content to expect. - """ - return not self.is_alive() and self.queue.empty() - - def stop(self): - """ - Stop the running thread. - """ - self.running = False - - - def readlines(self): - """ - Get currently available lines. 
- """ - while not self.queue.empty(): - yield self.queue.get() \ No newline at end of file diff --git a/auto_rx/auto_rx.py b/auto_rx/auto_rx.py index 740def4..3329929 100644 --- a/auto_rx/auto_rx.py +++ b/auto_rx/auto_rx.py @@ -1,370 +1,312 @@ #!/usr/bin/env python # -# Radiosonde Auto RX Tools +# Radiosonde Auto RX Service - V2.0 # -# 2017-04 Mark Jessop +# Copyright (C) 2018 Mark Jessop +# Released under GNU GPL v3 or later # -# Refer github page for instructions on setup and usage. -# https://github.com/projecthorus/radiosonde_auto_rx/ +# Refer github page for instructions on setup and usage. +# https://github.com/projecthorus/radiosonde_auto_rx/ # - -import numpy as np -import sys import argparse -import logging import datetime -import time -import os -import glob -import shutil -import platform -import signal -import Queue -import subprocess -import traceback -import json +import logging import re -from aprs_utils import * -from habitat_utils import * -from ozi_utils import * -from rotator_utils import * -from threading import Thread -from StringIO import StringIO -from findpeaks import * -from config_reader import * -from gps_grabber import * -from async_file_reader import AsynchronousFileReader +import sys +import time +import traceback + +from autorx.scan import SondeScanner +from autorx.decode import SondeDecoder +from autorx.logger import TelemetryLogger +from autorx.habitat import HabitatUploader +from autorx.utils import rtlsdr_test, position_info +from autorx.config import read_auto_rx_config + +try: + # Python 2 + from Queue import Queue +except ImportError: + # Python 3 + from queue import Queue -# TODO: Break this out to somewhere else, that is set automatically based on releases... -AUTO_RX_VERSION = '20180512' # Logging level # INFO = Basic status messages -# DEBUG = Adds information on each command run by subprocess. +# DEBUG = Adds detailed information on submodule operations. 
logging_level = logging.INFO -# Set this to true to enable dumping of all the rtl_power output to files in ./log/ -# Note that this can result in a LOT of log files being generated depending on your scanning settings. -uber_debug = False -# Internet Push Globals -APRS_OUTPUT_ENABLED = False -HABITAT_OUTPUT_ENABLED = False +# +# Global Variables +# -INTERNET_PUSH_RUNNING = True -internet_push_queue = Queue.Queue() +RS_PATH = "./" -# Second Queue for OziPlotter outputs, since we want this to run at a faster rate. -OZI_PUSH_RUNNING = True -ozi_push_queue = Queue.Queue() +# Optional override for RS92 ephemeris data. +rs92_ephemeris = None -# Habitat Uploader object, instantiated in __main__ -habitat_uploader = None +# Global configuration dictionary +config = None +# Exporter Lists +exporter_objects = [] # This list will hold references to each exporter instance that is created. +exporter_functions = [] # This list will hold references to the exporter add functions, which will be passed onto the decoders. -# Flight Statistics data -# stores copies of the telemetry dictionary returned by process_rs_line. -flight_stats = { - 'first': None, - 'apogee': None, - 'last': None -} +# RTLSDR Usage Register - This dictionary holds information about each SDR and its currently running Decoder / Scanner +# Key = SDR device index / ID +# 'device_idx': { +# 'in_use' (bool) : True if the SDR is currently in-use by a decoder or scanner. +# 'task' (class) : If this SDR is in use, a reference to the task. +# 'bias' (bool) : True if the bias-tee should be enabled on this SDR, False otherwise. +# 'ppm' (int) : The PPM offset for this SDR. +# 'gain' (float) : The gain setting to use with this SDR. A setting of -1 turns on hardware AGC. +# } +# +# +sdr_list = {} -# Station config, we need to populate this with data from station.cfg -config = {} +# Currently running task register. +# Keys will either be 'SCAN' (only one scanner shall be running at a time), or a sonde frequency in MHz. 
+# Each element contains: +# 'task' : (class) Reference to the currently running task. +# 'device_idx' (str): The allocated SDR. +# +task_list = {} -def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_power='rtl_power', ppm = 0, gain = -1, bias = False): - """ Run rtl_power, with a timeout""" - # Example: rtl_power -T -f 400400000:403500000:800 -i20 -1 -c 20% -p 0 -g 26.0 log_power.csv - # Add a -T option if bias is enabled - bias_option = "-T " if bias else "" +# Scan Result Queue +# Scan results are processed asynchronously from the main scanner object. +scan_results = Queue() - # Add a gain parameter if we have been provided one. - if gain != -1: - gain_param = '-g %.1f ' % gain - else: - gain_param = '' - # Add -k 30 option, to SIGKILL rtl_power 30 seconds after the regular timeout expires. - # Note that this only works with the GNU Coreutils version of Timeout, not the IBM version, - # which is provided with OSX (Darwin). - if 'Darwin' in platform.platform(): - timeout_kill = '' - else: - timeout_kill = '-k 30 ' +def allocate_sdr(check_only = False, task_description = ""): + """ Allocate an un-used SDR for a task. - rtl_power_cmd = "timeout %s%d %s %s-f %d:%d:%d -i %d -1 -c 20%% -p %d %s%s" % (timeout_kill, dwell+10, sdr_power, bias_option, start, stop, step, dwell, int(ppm), gain_param, filename) - logging.info("Running frequency scan.") - logging.debug("Running command: %s" % rtl_power_cmd) - ret_code = os.system(rtl_power_cmd) - if ret_code == 1: - logging.critical("rtl_power call failed!") - return False - else: - return True + Args: + check_only (bool) : If True, don't set the free SDR as in-use. Used to check if there are any free SDRs. -def read_rtl_power(filename): - """ Read in frequency samples from a single-shot log file produced by rtl_power """ - - # Output buffers. - freq = np.array([]) - power = np.array([]) - - freq_step = 0 - - - # Open file. 
- f = open(filename,'r') - - # rtl_power log files are csv's, with the first 6 fields in each line describing the time and frequency scan parameters - # for the remaining fields, which contain the power samples. - - for line in f: - # Split line into fields. - fields = line.split(',') - - if len(fields) < 6: - logging.error("Invalid number of samples in input file - corrupt?") - raise Exception("Invalid number of samples in input file - corrupt?") - - start_date = fields[0] - start_time = fields[1] - start_freq = float(fields[2]) - stop_freq = float(fields[3]) - freq_step = float(fields[4]) - n_samples = int(fields[5]) - - #freq_range = np.arange(start_freq,stop_freq,freq_step) - samples = np.loadtxt(StringIO(",".join(fields[6:])),delimiter=',') - freq_range = np.linspace(start_freq,stop_freq,len(samples)) - - # Add frequency range and samples to output buffers. - freq = np.append(freq, freq_range) - power = np.append(power, samples) - - f.close() - - # Sanitize power values, to remove the nan's that rtl_power puts in there occasionally. - power = np.nan_to_num(power) - - return (freq, power, freq_step) - - -def quantize_freq(freq_list, quantize=5000): - """ Quantise a list of frequencies to steps of Hz """ - return np.round(freq_list/quantize)*quantize - -def detect_sonde(frequency, sdr_fm='rtl_fm', ppm=0, gain=-1, bias=False, dwell_time=10): - """ Receive some FM and attempt to detect the presence of a radiosonde. """ - - # Example command (for command-line testing): - # rtl_fm -T -p 0 -M fm -g 26.0 -s 15k -f 401500000 | sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 | ./rs_detect -z -t 8 - - # Add a -T option if bias is enabled - bias_option = "-T " if bias else "" - - # Add a gain parameter if we have been provided one. 
- if gain != -1: - gain_param = '-g %.1f ' % gain - else: - gain_param = '' - - rx_test_command = "timeout %ds %s %s-p %d %s-M fm -F9 -s 15k -f %d 2>/dev/null |" % (dwell_time, sdr_fm, bias_option, int(ppm), gain_param, frequency) - rx_test_command += "sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null |" - rx_test_command += "./rs_detect -z -t 8 2>/dev/null" - - logging.info("Attempting sonde detection on %.3f MHz" % (frequency/1e6)) - logging.debug("Running command: %s" % rx_test_command) - - ret_code = os.system(rx_test_command) - - # Shift down by a byte... for some reason. - ret_code = ret_code >> 8 - - # Default is non-inverted FM. - inv = "" - - # Check if the inverted bit is set - if (ret_code & 0x80) > 0: - # If the inverted bit is set, we have to do some munging of the return code to get the sonde type. - ret_code = abs(-1 * (0x100 - ret_code)) - # Currently ignoring the inverted flag, as rs_detect appears to detect some sondes as inverted incorrectly. - #inv = "-" - - else: - ret_code = abs(ret_code) - - if ret_code == 3: - logging.info("Detected a RS41!") - return inv+"RS41" - elif ret_code == 4: - logging.info("Detected a RS92!") - return inv+"RS92" - elif ret_code == 2: - logging.info("Detected a DFM Sonde! (Unsupported)") - return inv+"DFM" - elif ret_code == 5: - logging.info("Detected a M10 Sonde! (Unsupported)") - return inv+"M10" - elif ret_code == 6: - logging.info("Detected a iMet Sonde! (Unsupported)") - return inv+"iMet" - else: - return None - - -def reset_rtlsdr(): - """ Attempt to perform a USB Reset on all attached RTLSDRs. This uses the usb_reset binary from ../scan""" - lsusb_output = subprocess.check_output(['lsusb']) - try: - devices = lsusb_output.split('\n') - for device in devices: - if 'RTL2838' in device: - # Found an rtlsdr! Attempt to extract bus and device number. - # Expecting something like: 'Bus 001 Device 005: ID 0bda:2838 Realtek Semiconductor Corp. 
RTL2838 DVB-T' - device_fields = device.split(' ') - # Attempt to cast fields to integers, to give some surety that we have the correct data. - device_bus = int(device_fields[1]) - device_number = int(device_fields[3][:-1]) - # Construct device address - reset_argument = '/dev/bus/usb/%03d/%03d' % (device_bus, device_number) - # Attempt to reset the device. - logging.info("Resetting device: %s" % reset_argument) - ret_code = subprocess.call(['./reset_usb', reset_argument]) - logging.debug("Got return code: %s" % ret_code) - else: - continue - except: - logging.error("Errors occured while attempting to reset USB device.") - - -def sonde_search(config, attempts = 5): - """ Perform a frequency scan across the defined range, and test each frequency for a radiosonde's presence. """ - search_attempts = attempts - - sonde_freq = None - sonde_type = None - - while search_attempts > 0: - - if len(config['whitelist']) == 0 : - # No whitelist frequencies provided - perform a scan. - run_rtl_power(config['min_freq']*1e6, config['max_freq']*1e6, config['search_step'], sdr_power=config['sdr_power_path'], ppm=config['sdr_ppm'], gain=config['sdr_gain'], bias=config['sdr_bias']) - - # Read in result - try: - (freq, power, step) = read_rtl_power('log_power.csv') - # Sanity check results. - if step == 0 or len(freq)==0 or len(power)==0: - raise Exception("Invalid file.") - - if uber_debug: - # Copy log_power.csv to log directory, for later debugging. - shutil.copy('log_power.csv', './log/log_power_%s.csv'%datetime.datetime.utcnow().strftime('%Y-%m-%d_%H%M%S')) - - - except Exception as e: - traceback.print_exc() - logging.error("Failed to read log_power.csv. Resetting RTLSDRs and attempting to run rtl_power again.") - # no log_power.csv usually means that rtl_power has locked up and had to be SIGKILL'd. - # This occurs when it can't get samples from the RTLSDR, because it's locked up for some reason. - # Issuing a USB Reset to the rtlsdr can sometimes solve this. 
- reset_rtlsdr() - search_attempts -= 1 - time.sleep(10) - continue - - - # Rough approximation of the noise floor of the received power spectrum. - power_nf = np.mean(power) - - # Detect peaks. - peak_indices = detect_peaks(power, mph=(power_nf+config['min_snr']), mpd=(config['min_distance']/step), show = False) - - # If we have found no peaks, and no greylist has been provided, re-scan. - if (len(peak_indices) == 0) and (len(config['greylist'])==0): - logging.info("No peaks found on this pass.") - search_attempts -= 1 - time.sleep(10) - continue - - # Sort peaks by power. - peak_powers = power[peak_indices] - peak_freqs = freq[peak_indices] - peak_frequencies = peak_freqs[np.argsort(peak_powers)][::-1] - - # Quantize to nearest x kHz - peak_frequencies = quantize_freq(peak_frequencies, config['quantization']) - - # Append on any frequencies in the supplied greylist - peak_frequencies = np.append(np.array(config['greylist'])*1e6, peak_frequencies) - - # Remove any duplicate entries after quantization, but preserve order. - _, peak_idx = np.unique(peak_frequencies, return_index=True) - peak_frequencies = peak_frequencies[np.sort(peak_idx)] - - # Remove any frequencies in the blacklist. - for _frequency in np.array(config['blacklist'])*1e6: - _index = np.argwhere(peak_frequencies==_frequency) - peak_frequencies = np.delete(peak_frequencies, _index) - - if len(peak_frequencies) == 0: - logging.info("No peaks found after blacklist frequencies removed.") - else: - logging.info("Performing scan on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6))) - - else: - # We have been provided a whitelist - scan through the supplied frequencies. - peak_frequencies = np.array(config['whitelist'])*1e6 - logging.info("Scanning on whitelist frequencies (MHz): %s" % str(peak_frequencies/1e6)) - - # Run rs_detect on each peak frequency, to determine if there is a sonde there. 
- for freq in peak_frequencies: - detected = detect_sonde(freq, - sdr_fm=config['sdr_fm_path'], - ppm=config['sdr_ppm'], - gain=config['sdr_gain'], - bias=config['sdr_bias'], - dwell_time=config['dwell_time']) - if detected != None: - sonde_freq = freq - sonde_type = detected - break - - if sonde_type != None: - # Found a sonde! Break out of the while loop and attempt to decode it. - return (sonde_freq, sonde_type) - else: - # No sondes found :-( Wait and try again. - search_attempts -= 1 - logging.warning("Search attempt failed, %d attempts remaining. Waiting %d seconds." % (search_attempts, config['search_delay'])) - time.sleep(config['search_delay']) - - # If we get here, we have exhausted our search attempts. - logging.error("No sondes detected.") - return (None, None) - - -def check_position_valid(data): + Returns: + (str): The device index/serial number of the free/allocated SDR, if one is free, else None. """ - Check to see if a payload position frame breaches one of our filters. - In this function we also check that the payload callsign is not invalid. + global sdr_list + + for _idx in sdr_list.keys(): + if sdr_list[_idx]['in_use'] == False: + # Found a free SDR! + if check_only: + # If we are just checking to see if there are any SDRs free, we don't allocate it. + pass + else: + # Otherwise, set the SDR as in-use. + sdr_list[_idx]['in_use'] = True + logging.info("SDR #%s has been allocated for %s." % (str(_idx), task_description)) + + return _idx + + # Otherwise, no SDRs are free. + return None + + +def start_scanner(): + """ Start a scanner thread on the first available SDR """ + global task_list, sdr_list, config, scan_results, RS_PATH + + if 'SCAN' in task_list: + # Already a scanner running! Return. + logging.debug("Task Manager - Attempted to start a scanner, but one already running.") + return + + # Attempt to allocate a SDR. 
+ _device_idx = allocate_sdr(task_description="Scanner") + if _device_idx is None: + logging.debug("Task Manager - No SDRs free to run Scanner.") + return + else: + # Create entry in task list. + task_list['SCAN'] = {'device_idx': _device_idx, 'task': None} + + # Init Scanner using settings from the global config. + + task_list['SCAN']['task'] = SondeScanner( + callback = scan_results.put, + auto_start = True, + min_freq = config['min_freq'], + max_freq = config['max_freq'], + search_step = config['search_step'], + whitelist = config['whitelist'], + greylist = config['greylist'], + blacklist = config['blacklist'], + snr_threshold = config['snr_threshold'], + min_distance = config['min_distance'], + quantization = config['quantization'], + scan_dwell_time = config['scan_dwell_time'], + detect_dwell_time = config['detect_dwell_time'], + max_peaks = config['max_peaks'], + rs_path = RS_PATH, + sdr_power = config['sdr_power'], + sdr_fm = config['sdr_fm'], + device_idx = _device_idx, + gain = sdr_list[_device_idx]['gain'], + ppm = sdr_list[_device_idx]['ppm'], + bias = sdr_list[_device_idx]['bias'] + ) + + # Add a reference into the sdr_list entry + sdr_list[_device_idx]['task'] = task_list['SCAN']['task'] + + +def stop_scanner(): + """ Stop a currently running scan thread, and release the SDR it was using. """ + global task_list, sdr_list + + if 'SCAN' not in task_list: + # No scanner thread running! + # This means we likely have a SDR free already. + return + else: + logging.info("Halting Scanner to decode detected radiosonde.") + _scan_sdr = task_list['SCAN']['device_idx'] + # Stop the scanner. + task_list['SCAN']['task'].stop() + # Relase the SDR. 
+ sdr_list[_scan_sdr]['in_use'] = False + sdr_list[_scan_sdr]['task'] = None + # Remove the scanner task from the task list + task_list.pop('SCAN') + + +def start_decoder(freq, sonde_type): + """ Attempt to start a decoder thread """ + global config, task_list, sdr_list, RS_PATH, exporter_functions, rs92_ephemeris + + # Allocate a SDR. + _device_idx = allocate_sdr(task_description="Decoder (%s, %.3f MHz)" % (sonde_type, freq/1e6)) + + if _device_idx is None: + logging.error("Could not allocate SDR for decoder!") + return + else: + # Add an entry to the task list + task_list[freq] = {'device_idx': _device_idx, 'task': None} + + # Set the SDR to in-use + sdr_list[_device_idx]['in_use'] = True + + # Initialise a decoder. + task_list[freq]['task'] = SondeDecoder( + sonde_type = sonde_type, + sonde_freq = freq, + rs_path = RS_PATH, + sdr_fm = config['sdr_fm'], + device_idx = _device_idx, + gain = sdr_list[_device_idx]['gain'], + ppm = sdr_list[_device_idx]['ppm'], + bias = sdr_list[_device_idx]['bias'], + exporter = exporter_functions, + timeout = config['rx_timeout'], + telem_filter = telemetry_filter, + rs92_ephemeris = rs92_ephemeris + ) + sdr_list[_device_idx]['task'] = task_list[freq]['task'] + + + +def handle_scan_results(): + """ Read in Scan results via the scan results Queue. + + Depending on how many SDRs are available, two things can happen: + - If there is a free SDR, allocate it to a decoder. + - If there is no free SDR, but a scanner is running, stop the scanner and start decoding. + """ + global scan_results, task_list, sdr_list + if scan_results.qsize() > 0: + _scan_data = scan_results.get() + for _sonde in _scan_data: + _freq = _sonde[0] + _type = _sonde[1] + + if _freq in task_list: + # Already decoding this sonde, continue. + continue + else: + logging.info("Scanner - Detected new %s sonde on %.3f MHz!" % (_type, _freq/1e6)) + if allocate_sdr(check_only=True) is not None : + # There is a SDR free! 
Start the decoder on that SDR + start_decoder(_freq, _type) + + elif (allocate_sdr(check_only=True) is None) and ('SCAN' in task_list): + # We have run out of SDRs, but a scan thread is running. + # Stop the scan thread and take that receiver! + stop_scanner() + start_decoder(_freq, _type) + else: + # We have no SDRs free + pass + + +def clean_task_list(): + """ Check the task list to see if any tasks have stopped running. If so, release the associated SDR """ + global task_list, sdr_list + + for _key in task_list.keys(): + # Attempt to get the state of the task + try: + _running = task_list[_key]['task'].running() + _task_sdr = task_list[_key]['device_idx'] + except Exception as e: + logging.error("Task Manager - Error getting task %s state - %s" % (str(_key),str(e))) + continue + + if _running == False: + # This task has stopped. Release it's associated SDR. + sdr_list[_task_sdr]['in_use'] = False + sdr_list[_task_sdr]['task'] = None + # Pop the task from the task list. + task_list.pop(_key) + + # Check if there is a scanner thread still running. If not, and if there is a SDR free, start one up again. + if ('SCAN' not in task_list) and (allocate_sdr(check_only=True) is not None): + # We have a SDR free, and we are not running a scan thread. Start one. + start_scanner() + + +def stop_all(): + """ Shut-down all decoders, scanners, and exporters. """ + global task_list, exporter_objects + logging.info("Starting shutdown of all threads.") + for _task in task_list.keys(): + try: + task_list[_task]['task'].stop() + except Exception as e: + logging.error("Error stopping task - %s" % str(e)) + + for _exporter in exporter_objects: + try: + _exporter.close() + except Exception as e: + logging.error("Error stopping exporter - %s" % str(e)) + + +def telemetry_filter(telemetry): + """ Filter incoming radiosonde telemetry based on various factors, + - Invalid Position + - Invalid Altitude + - Abnormal range from receiver. + - Invalid serial number. 
+ """ - # Access the global copy of the station config. Bit of a hack, but the alternative is - # passing the config through multiple layers of functions. global config - # First Check: zero lat/lon - if (data['lat'] == 0.0) and (data['lon'] == 0.0): - logging.warning("Zero Lat/Lon. Sonde does not have GPS lock.") + if (telemetry['lat'] == 0.0) and (telemetry['lon'] == 0.0): + logging.warning("Zero Lat/Lon. Sonde %s does not have GPS lock." % telemetry['id']) return False # Second check: Altitude cap. - if data['alt'] > config['max_altitude']: - _altitude_breach = data['alt'] - config['max_altitude'] - logging.warning("Position breached altitude cap by %d m." % _altitude_breach) + if telemetry['alt'] > config['max_altitude']: + _altitude_breach = telemetry['alt'] - config['max_altitude'] + logging.warning("Sonde %s position breached altitude cap by %d m." % (telemetry['id'], _altitude_breach)) return False # Third check - is the payload more than x km from our listening station. @@ -372,17 +314,17 @@ def check_position_valid(data): if (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0): # Calculate the distance from the station to the payload. _listener = (config['station_lat'], config['station_lon'], config['station_alt']) - _payload = (data['lat'], data['lon'], data['alt']) + _payload = (telemetry['lat'], telemetry['lon'], telemetry['alt']) # Calculate using positon_info function from rotator_utils.py _info = position_info(_listener, _payload) if _info['straight_distance'] > config['max_radius_km']*1000: _radius_breach = _info['straight_distance']/1000.0 - config['max_radius_km'] - logging.warning("Position breached radius cap by %.1f km." % (_radius_breach)) + logging.warning("Sonde %s position breached radius cap by %.1f km." 
% (telemetry['id'], _radius_breach)) return False # Payload Serial Number Checks - _serial = data['id'] + _serial = telemetry['id'] # Run a Regex to match known Vaisala RS92/RS41 serial numbers (YWWDxxxx) # RS92: https://www.vaisala.com/sites/default/files/documents/Vaisala%20Radiosonde%20RS92%20Serial%20Number.pdf # RS41: https://www.vaisala.com/sites/default/files/documents/Vaisala%20Radiosonde%20RS41%20Serial%20Number.pdf @@ -390,651 +332,33 @@ def check_position_valid(data): vaisala_callsign_valid = re.match(r'[J-T][0-5][\d][1-7]\d{4}', _serial) # Regex to check DFM06/09 callsigns. - # TODO: Check if this valid for DFM06s + # TODO: Check if this valid for DFM06s, and find out what's up with the 8-digit DFM09 callsigns. dfm_callsign_valid = re.match(r'DFM0[69]-\d{6}', _serial) if vaisala_callsign_valid or dfm_callsign_valid: return True else: - logging.warning("Payload ID does not match regex. Discarding.") + logging.warning("Payload ID %s does not match regex. Discarding." % telemetry['id']) return False -# Dictionary of observed payload IDs. -seen_payload_ids = {} +def main(): + """ Main Loop """ + global config, sdr_list, exporter_objects, exporter_functions, logging_level -def payload_id_valid_for_upload(payload_id, update=False): - ''' Update our list of seen payload IDs ''' - global config, seen_payload_ids + # Command line arguments. 
+ parser = argparse.ArgumentParser() + parser.add_argument("-c" ,"--config", default="station.cfg", help="Receive Station Configuration File") + parser.add_argument("-f", "--frequency", type=float, default=0.0, help="Sonde Frequency (MHz) (bypass scan step, and quit if no sonde found).") + parser.add_argument("-e", "--ephemeris", type=str, default="None", help="Use a manually obtained ephemeris file.") + parser.add_argument("-v", "--verbose", help="Enable debug output.", action="store_true") + args = parser.parse_args() - if payload_id in seen_payload_ids: - if seen_payload_ids[payload_id] >= config['payload_id_valid']: - # We have seen this payload ID often enough to consider it to be valid. - return True - else: - if update: - seen_payload_ids[payload_id] += 1 - else: - if update: - seen_payload_ids[payload_id] = 1 + # Set log-level to DEBUG if requested + if args.verbose: + logging_level = logging.DEBUG - # Otherwise, we still haven't seen this payload enough to be sure it's ID is valid. - return False - - -def process_rs_line(line): - """ Process a line of output from the radiosonde decoder, converting it to a dict """ - try: - if line[0] != "{": - return None - - rs_frame = json.loads(line) - # Note: We expect the following fields available within the JSON blob: - # id, frame, datetime, lat, lon, alt, crc - rs_frame['crc'] = True # The demods only report frames that match crc so we can lie here - - if 'temp' not in rs_frame.keys(): - rs_frame['temp'] = -273.0 # We currently don't get temperature data out of the RS92s. - - rs_frame['humidity'] = -1.0 # Currently no Humidity data available. 
- rs_frame['datetime_str'] = rs_frame['datetime'].replace("Z","") #python datetime sucks - rs_frame['short_time'] = rs_frame['datetime'].split(".")[0].split("T")[1] - - _telem_string = "%s,%d,%s,%.5f,%.5f,%.1f,%.1f,%s" % (rs_frame['id'], rs_frame['frame'],rs_frame['datetime'], rs_frame['lat'], rs_frame['lon'], rs_frame['alt'], rs_frame['temp'], rs_frame['crc']) - - if check_position_valid(rs_frame): - logging.info("TELEMETRY: %s" % _telem_string) - # Update the seen-payload-id list - # This will then be queried within the internet upload threads. - payload_id_valid_for_upload(rs_frame['id'],update=True) - - return rs_frame - else: - logging.warning("Invalid Position, discarding: %s" % _telem_string) - return None - - except: - logging.error("Could not parse string: %s" % line) - traceback.print_exc() - return None - -def update_flight_stats(data): - """ Maintain a record of flight statistics. """ - global flight_stats - - # Save the current frame into the 'last' frame storage - flight_stats['last'] = data - - # Is this our first telemetry frame? - # If so, populate all fields in the flight stats dict with the current telemetry frame. - if flight_stats['first'] == None: - flight_stats['first'] = data - flight_stats['apogee'] = data - - # Is the current altitude higher than the current peak altitude? - if data['alt'] > flight_stats['apogee']['alt']: - flight_stats['apogee'] = data - - - -def calculate_flight_statistics(): - """ Produce a flight summary, for inclusion in the log file. """ - global flight_stats - - # Grab peak altitude. - peak_altitude = flight_stats['apogee']['alt'] - - # Grab last known descent rate - descent_rate = flight_stats['last']['vel_v'] - - # Calculate average ascent rate, based on data we have. - # Wrap this in a try, in case we have time string parsing issues. - try: - if flight_stats['first'] == flight_stats['apogee']: - # We have only caught a flight during descent. Don't calculate ascent rate. 
- ascent_rate = -1.0 - else: - ascent_height = flight_stats['apogee']['alt'] - flight_stats['first']['alt'] - start_time = datetime.datetime.strptime(flight_stats['first']['datetime_str'],"%Y-%m-%dT%H:%M:%S.%f") - apogee_time = datetime.datetime.strptime(flight_stats['apogee']['datetime_str'],"%Y-%m-%dT%H:%M:%S.%f") - ascent_time = (apogee_time - start_time).seconds - ascent_rate = ascent_height/float(ascent_time) - except: - ascent_rate = -1.0 - - stats_str = "Acquired %s at %s on %s, at %d m altitude.\n" % (flight_stats['first']['type'], flight_stats['first']['datetime_str'], flight_stats['first']['freq'], int(flight_stats['first']['alt'])) - stats_str += "Ascent Rate: %.1f m/s, Peak Altitude: %d, Descent Rate: %.1f m/s\n" % (ascent_rate, int(peak_altitude), descent_rate) - stats_str += "Last Position: %.5f, %.5f, %d m alt, at %s\n" % (flight_stats['last']['lat'], flight_stats['last']['lon'], int(flight_stats['last']['alt']), flight_stats['last']['datetime_str']) - stats_str += "Flight Path: https://aprs.fi/#!call=%s&timerange=10800&tail=10800\n" % flight_stats['last']['id'] - - return stats_str - -def decode_rs92(frequency, sdr_fm='rtl_fm', ppm=0, gain=-1, bias=False, invert=False, rx_queue=None, almanac=None, ephemeris=None, timeout=120, save_log=False): - """ Decode a RS92 sonde """ - global latest_sonde_data, internet_push_queue, ozi_push_queue - - # Before we get started, do we need to download GPS data? - if ephemeris == None: - # If no ephemeris data defined, attempt to download it. - # get_ephemeris will either return the saved file name, or None. - ephemeris = get_ephemeris(destination="ephemeris.dat") - - # If ephemeris is still None, then we failed to download the ephemeris data. - # Try and grab the almanac data instead - if ephemeris == None: - logging.error("Could not obtain ephemeris data, trying to download an almanac.") - almanac = get_almanac(destination="almanac.txt") - if almanac == None: - # We probably don't have an internet connection. 
Bomb out, since we can't do much with the sonde telemetry without an almanac! - logging.critical("Could not obtain GPS ephemeris or almanac data.") - return False - - # Add a -T option if bias is enabled - bias_option = "-T " if bias else "" - - # Add a gain parameter if we have been provided one. - if gain != -1: - gain_param = '-g %.1f ' % gain - else: - gain_param = '' - - # Example command: - # rtl_fm -p 0 -g 26.0 -M fm -F9 -s 12k -f 400500000 | sox -t raw -r 12k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - highpass 20 lowpass 2500 2>/dev/null | ./rs92ecc -vx -v --crc --ecc --vel -e ephemeris.dat - decode_cmd = "%s %s-p %d %s-M fm -F9 -s 12k -f %d 2>/dev/null |" % (sdr_fm,bias_option, int(ppm), gain_param, frequency) - decode_cmd += "sox -t raw -r 12k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - lowpass 2500 highpass 20 2>/dev/null |" - - # Note: I've got the check-CRC option hardcoded in here as always on. - # I figure this is prudent if we're going to proceed to push this telemetry data onto a map. - if ephemeris != None: - decode_cmd += "./rs92ecc -vx -v --crc --ecc --vel -e %s" % ephemeris - elif almanac != None: - decode_cmd += "./rs92ecc -vx -v --crc --ecc --vel -a %s" % almanac - - # Add inversion option if we have detected the signal as being inverted (shouldn't happen, but anyway...) - if invert: - decode_cmd += " -i" - - logging.debug("Running command: %s" % decode_cmd) - - rx_last_line = time.time() - - # Receiver subprocess. Discard stderr, and feed stdout into an asynchronous read class. - rx = subprocess.Popen(decode_cmd, shell=True, stdin=None, stdout=subprocess.PIPE, preexec_fn=os.setsid) - rx_stdout = AsynchronousFileReader(rx.stdout, autostart=True) - - _log_file = None - - while not rx_stdout.eof(): - for line in rx_stdout.readlines(): - if (line != None) and (line != ""): - try: - data = process_rs_line(line) - - if data != None: - # Reset timeout counter - rx_last_line = time.time() - # Add in a few fields that don't come from the sonde telemetry. 
- data['freq'] = "%.3f MHz" % (frequency/1e6) - data['type'] = "RS92" - - # If we are seeing any aux data (i.e. there is something strapped to this RS92), append '-Ozone' to the type. - if 'aux' in data.keys(): - _ozone = "-Ozone" - else: - _ozone = "" - - # post to MQTT - if mqtt_client: - data['seen_by'] = config['uploader_callsign'] - mqtt_client.publish("sonde/%s" % data['id'], payload=json.dumps(data), retain=True) - - # Per-Sonde Logging - if save_log: - if _log_file is None: - _existing_files = glob.glob("./log/*%s_%s*_sonde.log" % (data['id'], data['type'])) - if len(_existing_files) != 0: - _log_file_name = _existing_files[0] - logging.debug("Using existing log file: %s" % _log_file_name) - else: - _log_file_name = "./log/%s_%s_%s_%d_sonde.log" % ( - datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"), - data['id'], - (data['type'] + _ozone), - int(frequency/1e3)) - logging.debug("Opening new log file: %s" % _log_file_name) - - _log_file = open(_log_file_name,'ab') - - # Write a log line - # datetime,id,frame_no,lat,lon,alt,type,frequency - _log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%.1f,%s,%.3f\n" % ( - data['datetime_str'], - data['id'], - data['frame'], - data['lat'], - data['lon'], - data['alt'], - data['temp'], - (data['type'] + _ozone), - frequency/1e6) - - _log_file.write(_log_line) - _log_file.flush() - - - update_flight_stats(data) - - if rx_queue != None: - try: - internet_push_queue.put_nowait(data) - ozi_push_queue.put_nowait(data) - except: - pass - except: - traceback.print_exc() - logging.error("Error parsing line: %s" % line) - - # Check timeout counter. - if time.time() > (rx_last_line+timeout): - logging.error("RX Timed out.") - break - # Sleep for a short time. - time.sleep(0.1) - - # If we were writing a log, close the file. 
- if _log_file != None: - _log_file.flush() - _log_file.close() - - logging.error("Closing RX Thread.") - os.killpg(os.getpgid(rx.pid), signal.SIGTERM) - rx_stdout.stop() - rx_stdout.join() - return - - -def decode_rs41(frequency, sdr_fm='rtl_fm', ppm=0, gain=-1, bias=False, invert=False, rx_queue=None, timeout=120, save_log=False): - """ Decode a RS41 sonde """ - global latest_sonde_data, internet_push_queue, ozi_push_queue - # Add a -T option if bias is enabled - bias_option = "-T " if bias else "" - - # Add a gain parameter if we have been provided one. - if gain != -1: - gain_param = '-g %.1f ' % gain - else: - gain_param = '' - - # rtl_fm -p 0 -g -1 -M fm -F9 -s 15k -f 405500000 | sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - lowpass 2600 2>/dev/null | ./rs41ecc --crc --ecc --ptu - # Note: Have removed a 'highpass 20' filter from the sox line, will need to re-evaluate if adding that is useful in the future. - decode_cmd = "%s %s-p %d %s-M fm -F9 -s 15k -f %d 2>/dev/null |" % (sdr_fm, bias_option, int(ppm), gain_param, frequency) - decode_cmd += "sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - lowpass 2600 2>/dev/null |" - - # Note: I've got the check-CRC option hardcoded in here as always on. - # I figure this is prudent if we're going to proceed to push this telemetry data onto a map. - decode_cmd += "./rs41ecc --crc --ecc --ptu" - - # Add inversion option if we have detected the signal as being inverted (shouldn't happen, but anyway...) - if invert: - decode_cmd += " -i" - - logging.debug("Running command: %s" % decode_cmd) - - rx_last_line = time.time() - - # Receiver subprocess. Discard stderr, and feed stdout into an asynchronous read class. 
- rx = subprocess.Popen(decode_cmd, shell=True, stdin=None, stdout=subprocess.PIPE, preexec_fn=os.setsid) - rx_stdout = AsynchronousFileReader(rx.stdout, autostart=True) - - _log_file = None - - while not rx_stdout.eof(): - for line in rx_stdout.readlines(): - if (line != None) and (line != ""): - try: - data = process_rs_line(line) - - if data != None: - # Reset timeout counter. - rx_last_line = time.time() - # Add in a few fields that don't come from the sonde telemetry. - data['freq'] = "%.3f MHz" % (frequency/1e6) - data['type'] = "RS41" - - # post to MQTT - if mqtt_client: - data['seen_by'] = config['uploader_callsign'] - mqtt_client.publish("sonde/%s" % data['id'], payload=json.dumps(data), retain=True) - - # Per-Sonde Logging - if save_log: - if _log_file is None: - _existing_files = glob.glob("./log/*%s_%s*_sonde.log" % (data['id'], data['type'])) - if len(_existing_files) != 0: - _log_file_name = _existing_files[0] - logging.debug("Using existing log file: %s" % _log_file_name) - else: - _log_file_name = "./log/%s_%s_%s_%d_sonde.log" % ( - datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"), - data['id'], - data['type'], - int(frequency/1e3)) - logging.debug("Opening new log file: %s" % _log_file_name) - - _log_file = open(_log_file_name,'ab') - - # Write a log line - # datetime,id,frame_no,lat,lon,alt,type,frequency - _log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%.1f,%s,%.3f\n" % ( - data['datetime_str'], - data['id'], - data['frame'], - data['lat'], - data['lon'], - data['alt'], - data['temp'], - data['type'], - frequency/1e6) - - _log_file.write(_log_line) - _log_file.flush() - - update_flight_stats(data) - - latest_sonde_data = data - - if rx_queue != None: - try: - internet_push_queue.put_nowait(data) - ozi_push_queue.put_nowait(data) - except: - pass - except: - _err_str = traceback.format_exc() - logging.error("Error parsing line: %s - %s" % (line, _err_str)) - - # Check timeout counter. 
- if time.time() > (rx_last_line+timeout): - logging.error("RX Timed out.") - break - # Sleep for a short time. - time.sleep(0.1) - - # If we were writing a log, close the file. - if _log_file != None: - _log_file.flush() - _log_file.close() - - logging.error("Closing RX Thread.") - os.killpg(os.getpgid(rx.pid), signal.SIGTERM) - rx_stdout.stop() - rx_stdout.join() - return - - -def decode_dfm(frequency, sdr_fm='rtl_fm', ppm=0, gain=-1, bias=False, invert=False, rx_queue=None, timeout=120, save_log=False): - """ Decode a Graw DFM06/DFM09 sonde """ - global latest_sonde_data, internet_push_queue, ozi_push_queue - # Add a -T option if bias is enabled - bias_option = "-T " if bias else "" - - # Add a gain parameter if we have been provided one. - if gain != -1: - gain_param = '-g %.1f ' % gain - else: - gain_param = '' - - # rtl_fm -p 0 -g 26.0 -M fm -F9 -s 15k -f 403250000 | sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - highpass 20 lowpass 2500 2>/dev/null | ./dfm09ecc -vv --ecc - # Note: Have removed a 'highpass 20' filter from the sox line, will need to re-evaluate if adding that is useful in the future. - decode_cmd = "%s %s-p %d %s-M fm -F9 -s 15k -f %d 2>/dev/null |" % (sdr_fm, bias_option, int(ppm), gain_param, frequency) - decode_cmd += "sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - highpass 20 lowpass 2000 2>/dev/null |" - - # DFM decoder - decode_cmd += "./dfm09ecc -vv --ecc" - - # Add inversion option if we have detected the signal as being inverted - if invert: - # Note: Currently ignoring the invert option on the DFM sondes. - #decode_cmd += " -i" - pass - - logging.debug("Running command: %s" % decode_cmd) - - rx_last_line = time.time() - - # Receiver subprocess. Discard stderr, and feed stdout into an asynchronous read class. 
- rx = subprocess.Popen(decode_cmd, shell=True, stdin=None, stdout=subprocess.PIPE, preexec_fn=os.setsid) - rx_stdout = AsynchronousFileReader(rx.stdout, autostart=True) - - _log_file = None - - while not rx_stdout.eof(): - for line in rx_stdout.readlines(): - if (line != None) and (line != ""): - try: - data = process_rs_line(line) - - if data != None: - # Reset timeout counter. - rx_last_line = time.time() - # Add in a few fields that don't come from the sonde telemetry. - data['freq'] = "%.3f MHz" % (frequency/1e6) - data['type'] = "DFM" - - # post to MQTT - if mqtt_client: - data['seen_by'] = config['uploader_callsign'] - mqtt_client.publish("sonde/%s" % data['id'], payload=json.dumps(data), retain=True) - - # Per-Sonde Logging - if save_log: - if _log_file is None: - _existing_files = glob.glob("./log/*%s_%s*_sonde.log" % (data['id'], data['type'])) - if len(_existing_files) != 0: - _log_file_name = _existing_files[0] - logging.debug("Using existing log file: %s" % _log_file_name) - else: - _log_file_name = "./log/%s_%s_%s_%d_sonde.log" % ( - datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"), - data['id'], - data['type'], - int(frequency/1e3)) - logging.debug("Opening new log file: %s" % _log_file_name) - - _log_file = open(_log_file_name,'ab') - - # Write a log line - # datetime,id,frame_no,lat,lon,alt,type,frequency - _log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%.1f,%s,%.3f\n" % ( - data['datetime_str'], - data['id'], - data['frame'], - data['lat'], - data['lon'], - data['alt'], - data['temp'], - data['type'], - frequency/1e6) - - _log_file.write(_log_line) - _log_file.flush() - - update_flight_stats(data) - - latest_sonde_data = data - - if rx_queue != None: - try: - internet_push_queue.put_nowait(data) - ozi_push_queue.put_nowait(data) - except: - pass - except: - _err_str = traceback.format_exc() - logging.error("Error parsing line: %s - %s" % (line, _err_str)) - - # Check timeout counter. 
- if time.time() > (rx_last_line+timeout): - logging.error("RX Timed out.") - break - # Sleep for a short time. - time.sleep(0.1) - - # If we were writing a log, close the file. - if _log_file != None: - _log_file.flush() - _log_file.close() - - logging.error("Closing RX Thread.") - os.killpg(os.getpgid(rx.pid), signal.SIGTERM) - rx_stdout.stop() - rx_stdout.join() - return - - -def internet_push_thread(station_config): - """ Push a frame of sonde data into various internet services (APRS-IS, Habitat), and also to a rotator (if configured) """ - global internet_push_queue, INTERNET_PUSH_RUNNING, habitat_uploader - logging.info("Started Internet Push thread.") - while INTERNET_PUSH_RUNNING: - data = None - try: - # Wait until there is somethign in the queue before trying to process. - if internet_push_queue.empty(): - time.sleep(1) - continue - else: - # Read in entire contents of queue, and keep the most recent entry. - while not internet_push_queue.empty(): - data = internet_push_queue.get() - except: - traceback.print_exc() - continue - - try: - # Wrap this entire section in a try/except, to catch any data parsing errors. - - # Test to see if this payload ID has been seen often enough to permit uploading. - if not payload_id_valid_for_upload(data['id'],update=False): - logging.warning("Payload ID has not been observed enough to permit uploading.") - else: - # Data from this payload is considered 'valid' - - # APRS Upload - if station_config['enable_aprs'] and (data['lat'] != 0.0) and (data['lon'] != 0.0): - # Produce aprs comment, based on user config. - aprs_comment = station_config['aprs_custom_comment'] - aprs_comment = aprs_comment.replace("", data['freq']) - aprs_comment = aprs_comment.replace("", data['id']) - aprs_comment = aprs_comment.replace("", "%.1f degC" % data['temp']) - aprs_comment = aprs_comment.replace("", "%.1fm/s" % data['vel_v']) - # Add 'Ozone' to the sonde type field if we are seeing aux data. 
- _sonde_type = data['type'] - if 'aux' in data.keys(): - _sonde_type += "-Ozone" - aprs_comment = aprs_comment.replace("", _sonde_type) - - # Push data to APRS. - aprs_data = push_balloon_to_aprs(data, - object_name=station_config['aprs_object_id'], - aprs_comment=aprs_comment, - aprsUser=station_config['aprs_user'], - aprsPass=station_config['aprs_pass'], - serverHost=station_config['aprs_server']) - logging.info("Data pushed to APRS-IS: %s" % aprs_data) - - # Habitat Upload - if station_config['enable_habitat']: - # We make the habitat comment field fixed, as we only need to add the payload type/serial/frequency. - # If we are seeing aux data, it likely means we have an Ozone sonde! - if 'aux' in data.keys(): - _ozone = "-Ozone" - else: - _ozone = "" - - payload_callsign = config['payload_callsign'] - if config['payload_callsign'] == "": - payload_callsign = 'RS_' + data['id'] - initPayloadDoc(payload_callsign, config['payload_description']) # it's fine for us to call this multiple times as initPayloadDoc keeps a cache for serial numbers it's created payloads for. - - # Create comment field. - habitat_comment = "%s%s %s %s" % (data['type'], _ozone, data['id'], data['freq']) - - habitat_upload_payload_telemetry(habitat_uploader, - data, - payload_callsign=payload_callsign, - callsign=config['uploader_callsign'], - comment=habitat_comment) - - # Update Rotator positon, if configured. - if config['enable_rotator'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0): - # Calculate Azimuth & Elevation to Radiosonde. - rel_position = position_info((config['station_lat'], config['station_lon'], config['station_alt']), - (data['lat'], data['lon'], data['alt'])) - - # Update the rotator with the current sonde position. 
- update_rotctld(hostname=config['rotator_hostname'], - port=config['rotator_port'], - azimuth=rel_position['bearing'], - elevation=rel_position['elevation']) - - except: - logging.error("Error while uploading data: %s" % traceback.format_exc()) - - if station_config['synchronous_upload']: - # Sleep for a second to ensure we don't double upload in the same slot (shouldn't' happen, but anyway...) - time.sleep(1) - - # Wait until the next valid uplink timeslot. - # This is determined by waiting until the time since epoch modulus the upload rate is equal to zero. - # Note that this will result in some odd upload times, due to leap seconds and otherwise, but should - # result in multiple stations (assuming local timezones are the same, and the stations are synced to NTP) - # uploading at roughly the same time. - while int(time.time())%station_config['upload_rate'] != 0: - time.sleep(0.1) - else: - # Otherwise, just sleep. - time.sleep(station_config['upload_rate']) - - logging.debug("Closing internet push thread.") - -def ozi_push_thread(station_config): - """ Push a frame of sonde data into various internet services (APRS-IS, Habitat) """ - global ozi_push_queue, OZI_PUSH_RUNNING - logging.info("Started OziPlotter Push thread.") - while OZI_PUSH_RUNNING: - data = None - try: - # Wait until there is somethign in the queue before trying to process. - if ozi_push_queue.empty(): - time.sleep(1) - continue - else: - # Read in entire contents of queue, and keep the most recent entry. 
- while not ozi_push_queue.empty(): - data = ozi_push_queue.get() - except: - traceback.print_exc() - continue - - try: - if station_config['ozi_enabled']: - push_telemetry_to_ozi(data,hostname=station_config['ozi_hostname'], udp_port=station_config['ozi_port']) - - if station_config['payload_summary_enabled']: - push_payload_summary(data, udp_port=station_config['payload_summary_port']) - except: - traceback.print_exc() - - time.sleep(station_config['ozi_update_rate']) - - logging.debug("Closing thread.") - - -if __name__ == "__main__": - - # Setup logging. logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', filename=datetime.datetime.utcnow().strftime("log/%Y%m%d-%H%M%S_system.log"), level=logging_level) stdout_format = logging.Formatter('%(asctime)s %(levelname)s:%(message)s') stdout_handler = logging.StreamHandler(sys.stdout) @@ -1047,188 +371,77 @@ if __name__ == "__main__": urllib3_log = logging.getLogger("urllib3") urllib3_log.setLevel(logging.CRITICAL) - # Command line arguments. - parser = argparse.ArgumentParser() - parser.add_argument("-c" ,"--config", default="station.cfg", help="Receive Station Configuration File") - parser.add_argument("-f", "--frequency", type=float, default=0.0, help="Sonde Frequency (MHz) (bypass scan step, and quit if no sonde found).") - parser.add_argument("-t", "--timeout", type=int, default=180, help="Stop receiving after X minutes. Set to 0 to run continuously with no timeout.") - parser.add_argument("-e", "--ephemeris", type=str, default="None", help="Use a manually obtained ephemeris file.") - args = parser.parse_args() - # If we haven't been given an ephemeris file, set the ephemeris variable to None, so that we download one. - ephemeris = args.ephemeris - if ephemeris == "None": - ephemeris = None + # Attempt to read in config file + logging.info("Reading configuration file...") + _temp_cfg = read_auto_rx_config(args.config) + if _temp_cfg is None: + logging.critical("Error in configuration file! 
Exiting...") + sys.exit(1) + else: - logging.info("Using provided ephemeris file: %s" % ephemeris) + config = _temp_cfg + sdr_list = config['sdr_settings'] - # Attempt to read in configuration file. Use default config if reading fails. - config = read_auto_rx_config(args.config) + # If we have been supplied a frequency via the command line, override the whitelist settings. + if args.frequency != 0.0: + config['whitelist'] = [args.frequency] - logging.debug("Using Configuration: %s" % str(config)) - # Set the timeout - timeout_time = time.time() + int(args.timeout)*60 + # Start our exporter options + # Telemetry Logger + if config['per_sonde_log']: + _logger = TelemetryLogger(log_directory="./log/") + exporter_objects.append(_logger) + exporter_functions.append(_logger.add) - # Internet push thread object. - push_thread_1 = None - push_thread_2 = None + # Habitat Uploader + if config['habitat_enabled']: + if config['habitat_payload_callsign'] == "": + _habitat_payload_call = None + else: + _habitat_payload_call = config['habitat_payload_callsign'] - # Sonde Frequency & Type variables. - sonde_freq = None - sonde_type = None + if config['habitat_upload_listener_position'] is False: + _habitat_user_position = None + else: + _habitat_user_position = (config['station_lat'], config['station_lon'], config['station_alt']) + + _habitat = HabitatUploader( + user_callsign = config['habitat_uploader_callsign'], + user_position = _habitat_user_position, + payload_callsign_override = _habitat_payload_call, + synchronous_upload_time = config['habitat_upload_rate'], + callsign_validity_threshold = config['payload_id_valid'] + ) - # MQTT Client - mqtt_client = None + exporter_objects.append(_habitat) + exporter_functions.append(_habitat.add) + + + # APRS - TODO + + # OziExplorer - TODO + + # MQTT (?) 
- TODO + + + while True: + clean_task_list() + handle_scan_results() + time.sleep(2) + + + + +if __name__ == "__main__": try: - # If Habitat upload is enabled and we have been provided with listener coords, push our position to habitat - if config['enable_habitat'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['upload_listener_position']: - uploadListenerPosition(config['uploader_callsign'], config['station_lat'], config['station_lon'], version=AUTO_RX_VERSION) - - if config['enable_habitat']: - habitat_uploader = HabitatUploader(user_callsign=config['uploader_callsign']) - - if config['mqtt_enabled']: - import paho.mqtt.client - mqtt_client = paho.mqtt.client.Client() - print "Connecting to MQTT Server %s:%s" % (config['mqtt_hostname'], config['mqtt_port']) - mqtt_client.connect(config['mqtt_hostname'], config['mqtt_port']) - mqtt_client.loop_start() - - # Main scan & track loop. We keep on doing this until we timeout (i.e. after we expect the sonde to have landed) - - while time.time() < timeout_time or args.timeout == 0: - # Attempt to detect a sonde on a supplied frequency. - if args.frequency != 0.0: - sonde_type = detect_sonde(int(float(args.frequency)*1e6), sdr_fm=config['sdr_fm_path'], ppm=config['sdr_ppm'], gain=config['sdr_gain'], bias=config['sdr_bias']) - if sonde_type != None: - sonde_freq = int(float(args.frequency)*1e6) - else: - logging.info("No sonde found. 
Exiting.") - INTERNET_PUSH_RUNNING = False - OZI_PUSH_RUNNING = False - if habitat_uploader != None: - habitat_uploader.close() - sys.exit(1) - - # If we have a rotator configured, attempt to point the rotator to the home location - if config['enable_rotator'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['rotator_homing_enabled']: - update_rotctld(hostname=config['rotator_hostname'], - port=config['rotator_port'], - azimuth=config['rotator_home_azimuth'], - elevation=config['rotator_home_elevation']) - - # If nothing is detected, or we haven't been supplied a frequency, perform a scan. - if sonde_type == None: - (sonde_freq, sonde_type) = sonde_search(config, config['search_attempts']) - - # If we *still* haven't detected a sonde... just keep on trying, until we hit our timeout. - if sonde_type == None: - continue - - logging.info("Starting decoding of %s on %.3f MHz" % (sonde_type, sonde_freq/1e6)) - - # Re-push our listener position to habitat, as if we have been running continuously we may have dropped off the map. - if config['enable_habitat'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['upload_listener_position']: - uploadListenerPosition(config['uploader_callsign'], config['station_lat'], config['station_lon'], version=AUTO_RX_VERSION) - - - # Start both of our internet/ozi push threads, even if we're not going to use them. - if push_thread_1 == None: - push_thread_1 = Thread(target=internet_push_thread, kwargs={'station_config':config}) - push_thread_1.start() - - if push_thread_2 == None: - push_thread_2 = Thread(target=ozi_push_thread, kwargs={'station_config':config}) - push_thread_2.start() - - - # Look for an inverted detection flag. - if sonde_type[0] == '-': - invert_fm = True - sonde_type = sonde_type[1:] - else: - invert_fm = False - - # Start decoding the sonde! 
- if sonde_type == "RS92": - decode_rs92(sonde_freq, - sdr_fm=config['sdr_fm_path'], - ppm=config['sdr_ppm'], - gain=config['sdr_gain'], - bias=config['sdr_bias'], - invert=invert_fm, - rx_queue=internet_push_queue, - timeout=config['rx_timeout'], - save_log=config['per_sonde_log'], - ephemeris=ephemeris - ) - - elif sonde_type == "RS41": - decode_rs41(sonde_freq, - sdr_fm=config['sdr_fm_path'], - ppm=config['sdr_ppm'], - gain=config['sdr_gain'], - bias=config['sdr_bias'], - invert=invert_fm, - rx_queue=internet_push_queue, - timeout=config['rx_timeout'], - save_log=config['per_sonde_log'], - ) - - elif sonde_type == 'DFM': - decode_dfm(sonde_freq, - sdr_fm=config['sdr_fm_path'], - ppm=config['sdr_ppm'], - gain=config['sdr_gain'], - bias=config['sdr_bias'], - invert=invert_fm, - rx_queue=internet_push_queue, - timeout=config['rx_timeout'], - save_log=config['per_sonde_log'], - ) - - else: - logging.error("Unsupported sonde type: %s" % sonde_type) - pass - - # Receiver has timed out. Reset sonde type and frequency variables and loop. - logging.error("Receiver timed out. Re-starting scan.") - time.sleep(config['search_delay']) - sonde_type = None - sonde_freq = None - + main() except KeyboardInterrupt: - logging.info("Caught CTRL-C, exiting. Please wait for all processes to finish (may take up to a minute).") - # Shut down the Internet Push Threads. - INTERNET_PUSH_RUNNING = False - OZI_PUSH_RUNNING = False - - if habitat_uploader != None: - habitat_uploader.close() - # Kill all rtl_fm processes. - os.system('killall rtl_power') - os.system('killall rtl_fm') - #.. and the rx_tools equivalents, just in case. - os.system('killall rx_power') - os.system('killall rx_fm') - sys.exit(0) - # Note that if we are running as a service, we won't ever get here. - - logging.info("Exceeded maximum receive time. Exiting.") - - # Write flight statistics to file. 
- if flight_stats['last'] != None: - stats_str = calculate_flight_statistics() - logging.info(stats_str) - - f = open("last_positions.txt", 'a') - f.write(stats_str + "\n") - f.close() - - # Stop the Output threads. - INTERNET_PUSH_RUNNING = False - OZI_PUSH_RUNNING = False - + stop_all() + except Exception as e: + traceback.print_exc() + print("Main Loop Error - %s" % str(e)) + stop_all() diff --git a/auto_rx/auto_rx.sh b/auto_rx/auto_rx.sh index 998b3b0..3193fa2 100755 --- a/auto_rx/auto_rx.sh +++ b/auto_rx/auto_rx.sh @@ -6,12 +6,14 @@ # NOTE: If running this from crontab, make sure to set the appropriate PATH env-vars, # else utilities like rtl_power and rtl_fm won't be found. # +# WARNING - THIS IS DEPRECATED - USE THE SYSTEMD SERVICE +# # change into appropriate directory cd /home/pi/radiosonde_auto_rx/auto_rx/ # Clean up old files -rm log_power.csv +rm log_power*.csv # Start auto_rx process with a 3 hour timeout. timeout 14400 python auto_rx.py 2>error.log diff --git a/auto_rx/autorx/__init__.py b/auto_rx/autorx/__init__.py index fe2dda5..c20bea6 100644 --- a/auto_rx/autorx/__init__.py +++ b/auto_rx/autorx/__init__.py @@ -5,4 +5,4 @@ # Copyright (C) 2018 Mark Jessop # Released under GNU GPL v3 or later # -__version__ = "20180525" \ No newline at end of file +__version__ = "20180525-alpha" \ No newline at end of file diff --git a/auto_rx/autorx/config.py b/auto_rx/autorx/config.py index 1694683..0314970 100644 --- a/auto_rx/autorx/config.py +++ b/auto_rx/autorx/config.py @@ -53,7 +53,6 @@ def read_auto_rx_config(filename): 'habitat_uploader_callsign': 'SONDE_AUTO_RX', 'habitat_upload_listener_position': False, 'habitat_payload_callsign': '', - 'habitat_payload_description': 'Meteorological Radiosonde', # APRS Settings 'aprs_enabled' : False, 'aprs_upload_rate': 30, @@ -72,6 +71,7 @@ def read_auto_rx_config(filename): 'synchronous_upload' : False, 'scan_dwell_time' : 20, 'detect_dwell_time' : 5, + 'scan_delay' : 10, 'payload_id_valid' : 5, # Rotator Settings 
'enable_rotator': False, @@ -83,7 +83,6 @@ def read_auto_rx_config(filename): # OziExplorer Settings 'ozi_enabled' : False, 'ozi_update_rate': 5, - 'ozi_hostname' : '127.0.0.1', 'ozi_port' : 55681, 'payload_summary_enabled': False, 'payload_summary_port' : 55672 @@ -124,7 +123,6 @@ def read_auto_rx_config(filename): auto_rx_config['habitat_enabled'] = config.getboolean('habitat', 'habitat_enabled') auto_rx_config['habitat_upload_rate'] = config.getint('habitat', 'upload_rate') auto_rx_config['habitat_payload_callsign'] = config.get('habitat', 'payload_callsign') - auto_rx_config['habitat_payload_description'] = config.get('habitat', 'payload_description') auto_rx_config['habitat_uploader_callsign'] = config.get('habitat', 'uploader_callsign') auto_rx_config['habitat_upload_listener_position'] = config.getboolean('habitat','upload_listener_position') @@ -153,11 +151,13 @@ def read_auto_rx_config(filename): auto_rx_config['max_peaks'] = config.getint('advanced', 'max_peaks') auto_rx_config['scan_dwell_time'] = config.getint('advanced', 'scan_dwell_time') auto_rx_config['detect_dwell_time'] = config.getint('advanced', 'detect_dwell_time') + auto_rx_config['scan_delay'] = config.getint('advanced', 'scan_delay') auto_rx_config['payload_id_valid'] = config.getint('advanced', 'payload_id_valid') auto_rx_config['synchronous_upload'] = config.getboolean('advanced', 'synchronous_upload') # Rotator Settings (TBC) auto_rx_config['rotator_enabled'] = config.getboolean('rotator','rotator_enabled') + auto_rx_config['rotator_update_rate'] = config.getint('rotator', 'update_rate') auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname') auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port') auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled') @@ -176,7 +176,7 @@ def read_auto_rx_config(filename): _bias = config.getboolean(_section, 'bias') if (auto_rx_config['sdr_quantity'] > 1) and (_device_idx 
== '0'): - logging.error("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!") + logging.critical("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!") return None # See if the SDR exists. @@ -190,6 +190,25 @@ def read_auto_rx_config(filename): logging.error("Config - Error parsing SDR %d config - %s" % (_n,str(e))) continue + # Sanity checks when using more than one SDR + if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['habitat_payload_callsign'] != ""): + logging.critical("Fixed Habitat Payload callsign used in a multi-SDR configuration. Go read the warnings in the config file!") + return None + + if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['aprs_object_id'] != ""): + logging.critical("Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!") + return None + + if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['rotator_enabled']): + logging.critical("Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!") + return None + + # TODO: Revisit this limitation once the OziPlotter output sub-module is complete. + if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['ozi_enabled'] or auto_rx_config['payload_summary_enabled']): + logging.critical("Chase car outputs (OziPlotter/Payload Summary) enabled in a multi-SDR configuration.") + return None + + if len(auto_rx_config['sdr_settings'].keys()) == 0: # We have no SDRs to use!! logging.error("Config - No working SDRs! 
Cannot run...") diff --git a/auto_rx/autorx/decode.py b/auto_rx/autorx/decode.py index 58d6a59..f620607 100644 --- a/auto_rx/autorx/decode.py +++ b/auto_rx/autorx/decode.py @@ -373,29 +373,30 @@ class SondeDecoder(object): if 'aux' in _telemetry: _telemetry['type'] += "-Ozone" + # If we have been provided a telemetry filter function, pass the telemetry data + # through the filter, and return the response + # By default, we will assume the telemetry is OK. + _telem_ok = True + if self.telem_filter is not None: + try: + _telem_ok = self.telem_filter(_telemetry) + except Exception as e: + self.log_error("Failed to run telemetry filter - %s" % str(e)) + _telem_ok = True - # Send to the exporter functions (if we have any). + + # If the telemetry is OK, send to the exporter functions (if we have any). if self.exporters is None: return else: - for _exporter in self.exporters: - try: - _exporter(_telemetry) - except Exception as e: - self.log_error("Exporter Error %s" % str(e)) + if _telem_ok: + for _exporter in self.exporters: + try: + _exporter(_telemetry) + except Exception as e: + self.log_error("Exporter Error %s" % str(e)) - # If we have been provided a telemetry filter function, pass the telemetry data - # through the filter, and return the response - if self.telem_filter is not None: - try: - _ok = self.telem_filter(_telemetry) - return _ok - except Exception as e: - self.log_error("Failed to run telemetry filter - %s" % str(e)) - - # Otherwise, just assume the telemetry is good. - else: - return True + return _telem_ok @@ -405,7 +406,7 @@ class SondeDecoder(object): Args: line (str): Message to be logged. """ - logging.debug("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line)) + logging.debug("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line)) def log_info(self, line): @@ -413,7 +414,7 @@ class SondeDecoder(object): Args: line (str): Message to be logged. 
""" - logging.info("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line)) + logging.info("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line)) def log_error(self, line): @@ -421,7 +422,7 @@ class SondeDecoder(object): Args: line (str): Message to be logged. """ - logging.error("Decoder %s %.3f - %s" % (self.sonde_type, self.sonde_freq/1e6, line)) + logging.error("Decoder #%s %s %.3f - %s" % (str(self.device_idx), self.sonde_type, self.sonde_freq/1e6, line)) def stop(self): diff --git a/auto_rx/autorx/gps.py b/auto_rx/autorx/gps.py index a8f747a..2fd5ece 100644 --- a/auto_rx/autorx/gps.py +++ b/auto_rx/autorx/gps.py @@ -13,7 +13,7 @@ import os def get_ephemeris(destination="ephemeris.dat"): ''' Download the latest GPS ephemeris file from the CDDIS's FTP server ''' try: - logging.info("Connecting to GSFC FTP Server...") + logging.debug("GPS Grabber - Connecting to GSFC FTP Server...") ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10) ftp.login("anonymous","anonymous") ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y")) @@ -28,10 +28,10 @@ def get_ephemeris(destination="ephemeris.dat"): elif file_suffix in file_list[-2]: download_file = file_list[-2] else: - logging.error("Could not find appropriate ephemeris file.") + logging.error("GPS Grabber - Could not find appropriate ephemeris file.") return None - logging.info("Downloading ephemeris data file: %s" % download_file) + logging.debug("GPS Grabber - Downloading ephemeris data file: %s" % download_file) # Download file. f_eph = open(destination+".Z",'wb') @@ -42,11 +42,11 @@ def get_ephemeris(destination="ephemeris.dat"): # Unzip file. os.system("gunzip -q -f ./%s" % (destination+".Z")) - logging.info("Ephemeris downloaded to %s successfuly!" % destination) + logging.info("GPS Grabber - Ephemeris downloaded to %s successfuly!" 
% destination) return destination except Exception as e: - logging.error("Could not download ephemeris file. - %s" % str(e)) + logging.error("GPS Grabber - Could not download ephemeris file. - %s" % str(e)) return None def get_almanac(destination="almanac.txt", timeout=20): @@ -58,13 +58,13 @@ def get_almanac(destination="almanac.txt", timeout=20): f = open(destination,'w') f.write(data) f.close() - logging.info("Almanac downloaded to %s successfuly!" % destination) + logging.info("GPS Grabber - Almanac downloaded to %s successfully!" % destination) return destination else: - logging.error("Downloaded file is not a GPS almanac.") + logging.error("GPS Grabber - Downloaded file is not a GPS almanac.") return None except Exception as e: - logging.error("Failed to download almanac data - " % str(e)) + logging.error("GPS Grabber - Failed to download almanac data - %s" % str(e)) return None diff --git a/auto_rx/autorx/habitat.py b/auto_rx/autorx/habitat.py index dec2081..158a797 100644 --- a/auto_rx/autorx/habitat.py +++ b/auto_rx/autorx/habitat.py @@ -16,6 +16,7 @@ import json from base64 import b64encode from hashlib import sha256 from threading import Thread +from . import __version__ as auto_rx_version try: # Python 2 from Queue import Queue @@ -325,7 +326,7 @@ def uploadListenerPosition(callsign, lat, lon, version=''): # If this fails, it means we can't contact the Habitat server, # so there is no point continuing. if resp is False: - return + return False doc = { 'type': 'listener_telemetry', @@ -344,8 +345,10 @@ def uploadListenerPosition(callsign, lat, lon, version=''): resp = postListenerData(doc) if resp is True: logging.info("Habitat - Listener information uploaded.") + return True else: logging.error("Habitat - Unable to upload listener information.") + return False # @@ -394,7 +397,7 @@ class HabitatUploader(object): when a new sonde ID is observed. payload_callsign_override (str): Override the payload callsign in the uploaded sentence with this value. 
- WARNING: This will horrible break the tracker map if multiple sondes are uploaded under the same callsign. + WARNING: This will horribly break the tracker map if multiple sondes are uploaded under the same callsign. USE WITH CAUTION!!! synchronous_upload_time (int): Upload the most recent telemetry when time.time()%synchronous_upload_time == 0 @@ -413,6 +416,7 @@ class HabitatUploader(object): """ self.user_callsign = user_callsign + self.user_position = user_position self.payload_callsign_override = payload_callsign_override self.upload_timeout = upload_timeout self.upload_retries = upload_retries @@ -434,6 +438,7 @@ class HabitatUploader(object): # 'data' (Queue): A queue of telemetry sentences to be uploaded. When the upload timer fires, # this queue will be dumped, and the most recent telemetry uploaded. # 'habitat_document' (bool): Indicates if a habitat document has been created for this payload ID. + # 'listener_updated' (bool): Indicates if the listener position has been updated for the start of this ID's flight. self.observed_payloads = {} # Start the uploader thread. @@ -450,6 +455,10 @@ class HabitatUploader(object): self.timer_thread = Thread(target=self.upload_timer) self.timer_thread.start() + # Upload listener position + if self.user_position is not None: + uploadListenerPosition(self.user_callsign, self.user_position[0], self.user_position[1], version=auto_rx_version) + def habitat_upload(self, sentence): @@ -627,7 +636,7 @@ class HabitatUploader(object): if _id not in self.observed_payloads: # We haven't seen this ID before, so create a new dictionary entry for it. - self.observed_payloads[_id] = {'count':1, 'data':Queue(), 'habitat_document': False} + self.observed_payloads[_id] = {'count':1, 'data':Queue(), 'habitat_document': False, 'listener_updated': False} self.log_debug("New Payload %s. Not observed enough to allow upload." % _id) # However, we don't yet add anything to the queue for this payload... 
else: @@ -637,7 +646,16 @@ class HabitatUploader(object): # If we have seen this particular ID enough times, add the data to the ID's queue. if self.observed_payloads[_id]['count'] >= self.callsign_validity_threshold: + # Add the telemetry to the queue self.observed_payloads[_id]['data'].put(_telem) + + # If this is the first time we have observed this payload, update the listener position. + if (self.observed_payloads[_id]['listener_updated'] == False) and (self.user_position is not None): + self.observed_payloads[_id]['listener_updated'] = uploadListenerPosition( + self.user_callsign, + self.user_position[0], + self.user_position[1], + version=auto_rx_version) else: self.log_debug("Payload ID %s not observed enough to allow upload." % _id) @@ -717,23 +735,5 @@ class HabitatUploader(object): logging.warning("Habitat - %s" % line) -# -# Functions for uploading telemetry to Habitat -# - - - - -# DEPRECATED - USE -def habitat_upload_payload_telemetry(uploader, telemetry, payload_callsign = "RADIOSONDE", callsign="N0CALL", comment=None): - ''' Add a packet of radiosonde telemetry to the Habitat uploader queue. ''' - - sentence = telemetry_to_sentence(telemetry, payload_callsign = payload_callsign, comment=comment) - - try: - uploader.add(sentence) - except Exception as e: - logging.error("Could not add telemetry to Habitat Uploader - %s" % str(e)) - diff --git a/auto_rx/autorx/scan.py b/auto_rx/autorx/scan.py index edd96f8..0a878b6 100644 --- a/auto_rx/autorx/scan.py +++ b/auto_rx/autorx/scan.py @@ -80,15 +80,15 @@ def run_rtl_power(start, stop, step, filename="log_power.csv", dwell = 20, sdr_p gain_param, filename) - logging.info("Scanner - Running frequency scan.") - logging.debug("Scanner - Running command: %s" % rtl_power_cmd) + logging.info("Scanner #%s - Running frequency scan." 
% str(device_idx)) + #logging.debug("Scanner - Running command: %s" % rtl_power_cmd) try: FNULL = open(os.devnull, 'w') subprocess.check_call(rtl_power_cmd, shell=True, stderr=FNULL) FNULL.close() except subprocess.CalledProcessError: - logging.critical("Scanner - rtl_power call failed!") + logging.critical("Scanner #%s - rtl_power call failed!" % str(device_idx)) return False else: return True @@ -192,8 +192,7 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device rx_test_command += "sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null |" rx_test_command += os.path.join(rs_path,"rs_detect") + " -z -t 8 2>/dev/null >/dev/null" - logging.info("Scanner - Attempting sonde detection on %.3f MHz" % (frequency/1e6)) - logging.debug("Scanner - Running command: %s" % rx_test_command) + logging.debug("Scanner #%s - Attempting sonde detection on %.3f MHz" % (str(device_idx), frequency/1e6)) try: FNULL = open(os.devnull, 'w') @@ -201,7 +200,7 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device FNULL.close() except Exception as e: # Something broke when running the detection function. - logging.error("Scanner - Error when running rs_detect - %s" % str(e)) + logging.error("Scanner #%s - Error when running rs_detect - %s" % (str(device_idx), str(e))) return None # Shift down by a byte... for some reason. @@ -223,19 +222,19 @@ def detect_sonde(frequency, rs_path="./", dwell_time=10, sdr_fm='rtl_fm', device ret_code = abs(ret_code) if ret_code == 3: - logging.info("Scanner - Detected a RS41!") + logging.debug("Scanner #%s - Detected a RS41!" % str(device_idx)) return inv+"RS41" elif ret_code == 4: - logging.info("Scanner - Detected a RS92!") + logging.debug("Scanner #%s - Detected a RS92!" % str(device_idx)) return inv+"RS92" elif ret_code == 2: - logging.info("Scanner - Detected a DFM Sonde!") + logging.debug("Scanner #%s - Detected a DFM Sonde!" 
% str(device_idx)) return inv+"DFM" elif ret_code == 5: - logging.info("Scanner - Detected a M10 Sonde! (Unsupported)") + logging.debug("Scanner #%s - Detected a M10 Sonde! (Unsupported)" % str(device_idx)) return inv+"M10" elif ret_code == 6: - logging.info("Scanner - Detected a iMet Sonde! (Unsupported)") + logging.debug("Scanner #%s - Detected a iMet Sonde! (Unsupported)" % str(device_idx)) return inv+"iMet" else: return None @@ -267,6 +266,7 @@ class SondeScanner(object): quantization = 10000, scan_dwell_time = 20, detect_dwell_time = 5, + scan_delay = 10, max_peaks = 10, rs_path = "./", sdr_power = "rtl_power", @@ -295,6 +295,7 @@ class SondeScanner(object): Essentially all radiosondes transmit on 10 kHz channel steps. scan_dwell_time (int): Number of seconds for rtl_power to average spectrum over. Default = 20 seconds. detect_dwell_time (int): Number of seconds to allow rs_detect to attempt to detect a sonde. Default = 5 seconds. + scan_delay (int): Delay X seconds between scan runs. max_peaks (int): Maximum number of peaks to search over. Peaks are ordered by signal power before being limited to this number. rs_path (str): Path to the RS binaries (i.e rs_detect). Defaults to ./ sdr_power (str): Path to rtl_power, or drop-in equivalent. Defaults to 'rtl_power' @@ -321,6 +322,7 @@ class SondeScanner(object): self.quantization = quantization self.scan_dwell_time = scan_dwell_time self.detect_dwell_time = detect_dwell_time + self.scan_delay = scan_delay self.max_peaks = max_peaks self.rs_path = rs_path self.sdr_power = sdr_power @@ -360,6 +362,20 @@ class SondeScanner(object): self.log_warning("Sonde scan already running!") + def send_to_callback(self, results): + """ Send scan results to a callback. 
+ + Args: + results (list): List consisting of [freq, type)] + + """ + try: + if self.callback != None: + self.callback(results) + except Exception as e: + self.log_error("Error handling scan results - %s" % str(e)) + + def scan_loop(self): """ Continually perform scans, and pass any results onto the callback function """ @@ -389,13 +405,9 @@ class SondeScanner(object): else: # Scan completed successfuly! Reset the error counter. self.error_retries = 0 - # If we have scan results, pass them onto the callback. - if len(_results) > 0: - try: - if self.callback != None: - self.callback(_results) - except Exception as e: - self.log_error("Error handling scan results - %s" % str(e)) + + # Sleep before starting the next scan. + time.sleep(self.scan_delay) @@ -461,7 +473,7 @@ class SondeScanner(object): # If we have found no peaks, and no greylist has been provided, re-scan. if (len(peak_indices) == 0) and (len(self.greylist) == 0): - self.log_info("No peaks found.") + self.log_debug("No peaks found.") return [] # Sort peaks by power. @@ -489,10 +501,10 @@ class SondeScanner(object): peak_frequencies = np.append(np.array(self.greylist)*1e6, peak_frequencies) if len(peak_frequencies) == 0: - self.log_info("No peaks found after blacklist frequencies removed.") + self.log_debug("No peaks found after blacklist frequencies removed.") return [] else: - self.log_info("Performing scan on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6))) + self.log_info("Detected peaks on %d frequencies (MHz): %s" % (len(peak_frequencies),str(peak_frequencies/1e6))) else: # We have been provided a whitelist - scan through the supplied frequencies. @@ -517,6 +529,9 @@ class SondeScanner(object): if detected != None: # Add a detected sonde to the output array _search_results.append([freq, detected]) + + # Immediately send this result to the callback. + self.send_to_callback([[freq, detected]]) # If we only want the first detected sonde, then return now. 
if first_only: return _search_results @@ -524,9 +539,9 @@ class SondeScanner(object): # Otherwise, we continue.... if len(_search_results) == 0: - self.log_info("No sondes detected.") + self.log_debug("No sondes detected.") else: - self.log_info("Detected Sondes: %s" % str(_search_results)) + self.log_debug("Scan Detected Sondes: %s" % str(_search_results)) return _search_results diff --git a/auto_rx/autorx2.py b/auto_rx/autorx2.py deleted file mode 100644 index ac00a5e..0000000 --- a/auto_rx/autorx2.py +++ /dev/null @@ -1,370 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Service - V2.0 -# -# Copyright (C) 2018 Mark Jessop -# Released under GNU GPL v3 or later -# -# Refer github page for instructions on setup and usage. -# https://github.com/projecthorus/radiosonde_auto_rx/ -# -import argparse -import datetime -import logging -import sys -import time -import traceback - -from autorx.scan import SondeScanner -from autorx.decode import SondeDecoder -from autorx.logger import TelemetryLogger -from autorx.habitat import HabitatUploader -from autorx.utils import rtlsdr_test -from autorx.config import read_auto_rx_config - -try: - # Python 2 - from Queue import Queue -except ImportError: - # Python 3 - from queue import Queue - - -# Logging level -# INFO = Basic status messages -# DEBUG = Adds detailed information on submodule operations. -logging_level = logging.DEBUG - - -# -# Global Variables -# - -RS_PATH = "./" - -# Optional override for RS92 ephemeris data. -rs92_ephemeris = None - -# Global configuration dictionary -config = None - -# Exporter Lists -exporter_objects = [] # This list will hold references to each exporter instance that is created. -exporter_functions = [] # This list will hold references to the exporter add functions, which will be passed onto the decoders. 
- -# RTLSDR Usage Register - This dictionary holds information about each SDR and its currently running Decoder / Scanner -# Key = SDR device index / ID -# 'device_idx': { -# 'in_use' (bool) : True if the SDR is currently in-use by a decoder or scanner. -# 'task' (class) : If this SDR is in use, a reference to the task. -# 'bias' (bool) : True if the bias-tee should be enabled on this SDR, False otherwise. -# 'ppm' (int) : The PPM offset for this SDR. -# 'gain' (float) : The gain setting to use with this SDR. A setting of -1 turns on hardware AGC. -# } -# -# -sdr_list = {} - -# Currently running task register. -# Keys will either be 'SCAN' (only one scanner shall be running at a time), or a sonde frequency in MHz. -# Each element contains: -# 'task' : (class) Reference to the currently running task. -# 'device_idx' (str): The allocated SDR. -# -task_list = {} - - -# Scan Result Queue -# Scan results are processed asynchronously from the main scanner object. -scan_results = Queue() - - -def allocate_sdr(check_only = False): - """ Allocate an un-used SDR for a task. - - Args: - check_only (bool) : If True, don't set the free SDR as in-use. Used to check if there are any free SDRs. - - Returns: - (str): The device index/serial number of the free/allocated SDR, if one is free, else None. - """ - global sdr_list - - for _idx in sdr_list.keys(): - if sdr_list[_idx]['in_use'] == False: - # Found a free SDR! - if check_only: - # If we are just checking to see if there are any SDRs free, we don't allocate it. - pass - else: - # Otherwise, set the SDR as in-use. - sdr_list[_idx]['in_use'] = True - logging.info("SDR #%s has been allocated." % str(_idx)) - - return _idx - - # Otherwise, no SDRs are free. - return None - - -def clean_task_list(): - """ Routinely run to check the task list to see if any tasks have stopped running. 
If so, release the associated SDR """ - global task_list, sdr_list - - for _key in task_list.keys(): - # Attempt to get the state of the task - try: - _running = task_list[_key]['task'].running() - _task_sdr = task_list[_key]['device_idx'] - except Exception as e: - logging.error("Task Manager - Error getting task %s state - %s" % (str(_key),str(e))) - continue - - if _running == False: - # This task has stopped. Release it's associated SDR. - sdr_list[_task_sdr]['in_use'] = False - sdr_list[_task_sdr]['task'] = None - # Pop the task from the task list. - task_list.pop(_key) - - # Check if there is a scanner thread still running. If not, and if there is a SDR free, start one up again. - if ('SCAN' not in task_list) and (allocate_sdr(check_only=True) is not None): - # We have a SDR free, and we are not running a scan thread. Start one. - start_scanner() - - - -def start_scanner(): - """ Start a scanner thread on the first available SDR """ - global task_list, sdr_list, config, scan_results, RS_PATH - - if 'SCAN' in task_list: - # Already a scanner running! Return. - logging.debug("Task Manager - Attempted to start a scanner, but one already running.") - return - - # Attempt to allocate a SDR. - _device_idx = allocate_sdr() - if _device_idx is None: - logging.debug("Task Manager - No SDRs free to run Scanner.") - return - else: - # Create entry in task list. - task_list['SCAN'] = {'device_idx': _device_idx, 'task': None} - - # Init Scanner using settings from the global config. 
- - task_list['SCAN']['task'] = SondeScanner( - callback = scan_results.put, - auto_start = True, - min_freq = config['min_freq'], - max_freq = config['max_freq'], - search_step = config['search_step'], - whitelist = config['whitelist'], - greylist = config['greylist'], - blacklist = config['blacklist'], - snr_threshold = config['snr_threshold'], - min_distance = config['min_distance'], - quantization = config['quantization'], - scan_dwell_time = config['scan_dwell_time'], - detect_dwell_time = config['detect_dwell_time'], - max_peaks = config['max_peaks'], - rs_path = RS_PATH, - sdr_power = config['sdr_power'], - sdr_fm = config['sdr_fm'], - device_idx = _device_idx, - gain = sdr_list[_device_idx]['gain'], - ppm = sdr_list[_device_idx]['ppm'], - bias = sdr_list[_device_idx]['bias'] - ) - - # Add a reference into the sdr_list entry - sdr_list[_device_idx]['task'] = task_list['SCAN']['task'] - - -def stop_scanner(): - """ Stop a currently running scan thread, and release the SDR it was using. """ - global task_list, sdr_list - - if 'SCAN' not in task_list: - # No scanner thread running! - # This means we likely have a SDR free already. - return - else: - logging.info("Halting Scanner to decode detected radiosonde.") - _scan_sdr = task_list['SCAN']['device_idx'] - # Stop the scanner. - task_list['SCAN']['task'].stop() - # Relase the SDR. - sdr_list[_scan_sdr]['in_use'] = False - sdr_list[_scan_sdr]['task'] = None - # Remove the scanner task from the task list - task_list.pop('SCAN') - - -def start_decoder(freq, sonde_type): - """ Attempt to start a decoder thread """ - global config, task_list, sdr_list, RS_PATH, exporter_functions, rs92_ephemeris - - # Allocate a SDR. 
- _device_idx = allocate_sdr() - - if _device_idx is None: - logging.error("Could not allocate SDR for decoder!") - return - else: - # Add an entry to the task list - task_list[freq] = {'device_idx': _device_idx, 'task': None} - - # Set the SDR to in-use - sdr_list[_device_idx]['in_use'] = True - - # Initialise a decoder. - task_list[freq]['task'] = SondeDecoder( - sonde_type = sonde_type, - sonde_freq = freq, - rs_path = RS_PATH, - sdr_fm = config['sdr_fm'], - device_idx = _device_idx, - gain = sdr_list[_device_idx]['gain'], - ppm = sdr_list[_device_idx]['ppm'], - bias = sdr_list[_device_idx]['bias'], - exporter = exporter_functions, - timeout = config['rx_timeout'], - telem_filter = telemetry_filter, - rs92_ephemeris = rs92_ephemeris - ) - sdr_list[_device_idx]['task'] = task_list[freq]['task'] - - - -def handle_scan_results(): - """ Read in Scan results via the scan results Queue. - - Depending on how many SDRs are available, two things can happen: - - If there is a free SDR, allocate it to a decoder. - - If there is no free SDR, but a scanner is running, stop the scanner and start decoding. - """ - global scan_results, task_list, sdr_list - if scan_results.qsize() > 0: - _scan_data = scan_results.get() - for _sonde in _scan_data: - _freq = _sonde[0] - _type = _sonde[1] - - if _freq in task_list: - # Already decoding this sonde, continue. - continue - else: - - if allocate_sdr(check_only=True) is not None : - # There is a SDR free! Start the decoder on that SDR - start_decoder(_freq, _type) - - elif (allocate_sdr(check_only=True) is None) and ('SCAN' in task_list): - # We have run out of SDRs, but a scan thread is running. - # Stop the scan thread and take that receiver! - stop_scanner() - start_decoder(_freq, _type) - else: - # We have no SDRs free - pass - - -def stop_all(): - """ Shut-down all decoders, scanners, and exporters. 
""" - global task_list, exporter_objects - logging.info("Starting shutdown of all threads.") - for _task in task_list.keys(): - try: - task_list[_task]['task'].stop() - except Exception as e: - logging.error("Error stopping task - %s" % str(e)) - - for _exporter in exporter_objects: - try: - _exporter.close() - except Exception as e: - logging.error("Error stopping exporter - %s" % str(e)) - - -def telemetry_filter(telemetry): - """ Filter incoming radiosonde telemetry based on distance from the receiver """ - global config - - # TODO - return True - - -def main(): - """ Main Loop """ - global config, sdr_list, exporter_objects, exporter_functions - - logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', filename=datetime.datetime.utcnow().strftime("log/%Y%m%d-%H%M%S_system.log"), level=logging_level) - stdout_format = logging.Formatter('%(asctime)s %(levelname)s:%(message)s') - stdout_handler = logging.StreamHandler(sys.stdout) - stdout_handler.setFormatter(stdout_format) - logging.getLogger().addHandler(stdout_handler) - - # Set the requests logger to only display WARNING messages or higher. - requests_log = logging.getLogger("requests") - requests_log.setLevel(logging.CRITICAL) - urllib3_log = logging.getLogger("urllib3") - urllib3_log.setLevel(logging.CRITICAL) - - # Command line arguments. - parser = argparse.ArgumentParser() - parser.add_argument("-c" ,"--config", default="station_new.cfg", help="Receive Station Configuration File") - parser.add_argument("-f", "--frequency", type=float, default=0.0, help="Sonde Frequency (MHz) (bypass scan step, and quit if no sonde found).") - parser.add_argument("-e", "--ephemeris", type=str, default="None", help="Use a manually obtained ephemeris file.") - args = parser.parse_args() - - # Attempt to read in config file - logging.info("Reading configuration file...") - _temp_cfg = read_auto_rx_config(args.config) - if _temp_cfg is None: - logging.critical("Error in configuration file! 
Exiting...") - sys.exit(1) - - else: - config = _temp_cfg - sdr_list = config['sdr_settings'] - - # If we have been supplied a frequency via the command line, override the whitelist settings. - if args.frequency != 0.0: - config['whitelist'] = [args.frequency] - - - # Start our exporter options - if config['per_sonde_log']: - _logger = TelemetryLogger(log_directory="./testlog/") - exporter_objects.append(_logger) - exporter_functions.append(_logger.add) - - - # Habitat - - # APRS - - # OziExplorer - - - while True: - clean_task_list() - handle_scan_results() - time.sleep(5) - - - - -if __name__ == "__main__": - - try: - main() - except KeyboardInterrupt: - stop_all() - except Exception as e: - traceback.print_exc() - print("Main Loop Error - %s" % str(e)) - stop_all() - diff --git a/auto_rx/config_reader.py b/auto_rx/config_reader.py deleted file mode 100644 index 9ff5a94..0000000 --- a/auto_rx/config_reader.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Tools - Configuration File Parser -# -# 2017-04 Mark Jessop -# -import ConfigParser -import logging -import traceback -import json - -def read_auto_rx_config(filename): - # Configuration Defaults: - auto_rx_config = { - 'per_sonde_log' : True, - 'sdr_fm_path': 'rtl_fm', - 'sdr_power_path': 'rtl_power', - 'sdr_ppm' : 0, - 'sdr_gain' : -1, - 'sdr_bias' : False, - 'search_attempts': 5, - 'search_delay' : 10, - 'min_freq' : 400.4, - 'max_freq' : 404.0, - 'search_step' : 800, - 'min_snr' : 10, - 'min_distance' : 1000, - 'dwell_time' : 10, - 'quantization' : 10000, - 'rx_timeout' : 120, - 'station_lat' : 0.0, - 'station_lon' : 0.0, - 'station_alt' : 0.0, - 'upload_rate' : 30, - 'synchronous_upload' : False, - 'enable_aprs' : False, - 'enable_habitat': False, - 'aprs_user' : 'N0CALL', - 'aprs_pass' : '00000', - 'aprs_server' : 'rotate.aprs2.net', - 'aprs_object_id': '', - 'aprs_custom_comment': 'Radiosonde Auto-RX ', - 'payload_callsign': '', - 'payload_description': 'Meteorological 
Radiosonde', - 'uploader_callsign': 'SONDE_AUTO_RX', - 'upload_listener_position': False, - 'enable_rotator': False, - 'rotator_hostname': '127.0.0.1', - 'rotator_port' : 4533, - 'rotator_homing_enabled': False, - 'rotator_home_azimuth': 0, - 'rotator_home_elevation': 0, - 'ozi_enabled' : False, - 'ozi_update_rate': 5, - 'ozi_hostname' : '127.0.0.1', - 'ozi_port' : 55681, - 'mqtt_enabled' : False, - 'mqtt_hostname' : '127.0.0.1', - 'mqtt_port' : 1883, - 'payload_summary_enabled': False, - 'payload_summary_port' : 55672, - 'whitelist' : [], - 'blacklist' : [], - 'greylist' : [], - 'max_altitude' : 50000, - 'max_radius_km' : 1000, - 'payload_id_valid' : 5 # TODO: Add this to config file in next bulk update. - } - - try: - config = ConfigParser.RawConfigParser(auto_rx_config) - config.read(filename) - - auto_rx_config['per_sonde_log'] = config.getboolean('logging', 'per_sonde_log') - auto_rx_config['sdr_fm_path'] = config.get('sdr','sdr_fm_path') - auto_rx_config['sdr_power_path'] = config.get('sdr','sdr_power_path') - auto_rx_config['sdr_ppm'] = int(config.getfloat('sdr', 'sdr_ppm')) - auto_rx_config['sdr_gain'] = config.getfloat('sdr', 'sdr_gain') - auto_rx_config['sdr_bias'] = config.getboolean('sdr', 'sdr_bias') - auto_rx_config['search_attempts'] = config.getint('search_params', 'search_attempts') - auto_rx_config['search_delay'] = config.getint('search_params', 'search_delay') - auto_rx_config['min_freq'] = config.getfloat('search_params', 'min_freq') - auto_rx_config['max_freq'] = config.getfloat('search_params', 'max_freq') - auto_rx_config['search_step'] = config.getfloat('search_params', 'search_step') - auto_rx_config['min_snr'] = config.getfloat('search_params', 'min_snr') - auto_rx_config['min_distance'] = config.getfloat('search_params', 'min_distance') - auto_rx_config['dwell_time'] = config.getint('search_params', 'dwell_time') - auto_rx_config['quantization'] = config.getint('search_params', 'quantization') - auto_rx_config['rx_timeout'] = 
config.getint('search_params', 'rx_timeout') - auto_rx_config['station_lat'] = config.getfloat('location', 'station_lat') - auto_rx_config['station_lon'] = config.getfloat('location', 'station_lon') - auto_rx_config['station_alt'] = config.getfloat('location', 'station_alt') - auto_rx_config['upload_rate'] = config.getint('upload', 'upload_rate') - auto_rx_config['synchronous_upload'] = config.getboolean('upload','synchronous_upload') - auto_rx_config['enable_aprs'] = config.getboolean('upload', 'enable_aprs') - auto_rx_config['enable_habitat'] = config.getboolean('upload', 'enable_habitat') - auto_rx_config['aprs_user'] = config.get('aprs', 'aprs_user') - auto_rx_config['aprs_pass'] = config.get('aprs', 'aprs_pass') - auto_rx_config['aprs_server'] = config.get('aprs', 'aprs_server') - auto_rx_config['aprs_object_id'] = config.get('aprs', 'aprs_object_id') - auto_rx_config['aprs_custom_comment'] = config.get('aprs', 'aprs_custom_comment') - auto_rx_config['payload_callsign'] = config.get('habitat', 'payload_callsign') - auto_rx_config['payload_description'] = config.get('habitat', 'payload_description') - auto_rx_config['uploader_callsign'] = config.get('habitat', 'uploader_callsign') - auto_rx_config['upload_listener_position'] = config.getboolean('habitat','upload_listener_position') - auto_rx_config['enable_rotator'] = config.getboolean('rotator','enable_rotator') - auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname') - auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port') - auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled') - auto_rx_config['rotator_home_azimuth'] = config.getfloat('rotator', 'rotator_home_azimuth') - auto_rx_config['rotator_home_elevation'] = config.getfloat('rotator', 'rotator_home_elevation') - auto_rx_config['ozi_enabled'] = config.getboolean('oziplotter', 'ozi_enabled') - auto_rx_config['ozi_update_rate'] = config.getint('oziplotter', 
'ozi_update_rate') - auto_rx_config['ozi_port'] = config.getint('oziplotter', 'ozi_port') - auto_rx_config['payload_summary_enabled'] = config.getboolean('oziplotter', 'payload_summary_enabled') - auto_rx_config['payload_summary_port'] = config.getint('oziplotter', 'payload_summary_port') - - # Read in lists using a JSON parser. - auto_rx_config['whitelist'] = json.loads(config.get('search_params', 'whitelist')) - auto_rx_config['blacklist'] = json.loads(config.get('search_params', 'blacklist')) - auto_rx_config['greylist'] = json.loads(config.get('search_params', 'greylist')) - - # Position Filtering - auto_rx_config['max_altitude'] = config.getint('filtering', 'max_altitude') - auto_rx_config['max_radius_km'] = config.getint('filtering', 'max_radius_km') - - # MQTT settings - auto_rx_config['mqtt_enabled'] = config.getboolean('mqtt', 'mqtt_enabled') - auto_rx_config['mqtt_hostname'] = config.get('mqtt', 'mqtt_hostname') - auto_rx_config['mqtt_port'] = config.getint('mqtt', 'mqtt_port') - - return auto_rx_config - - except: - traceback.print_exc() - logging.error("Could not parse config file, using defaults.") - return auto_rx_config - - -if __name__ == '__main__': - ''' Quick test script to attempt to read in a config file. ''' - import sys - print(read_auto_rx_config(sys.argv[1])) - - diff --git a/auto_rx/findpeaks.py b/auto_rx/findpeaks.py deleted file mode 100644 index 816c61c..0000000 --- a/auto_rx/findpeaks.py +++ /dev/null @@ -1,175 +0,0 @@ -"""Detect peaks in data based on their amplitude and other features.""" - -from __future__ import division, print_function -import numpy as np - -__author__ = "Marcos Duarte, https://github.com/demotu/BMC" -__version__ = "1.0.4" -__license__ = "MIT" - - -def detect_peaks(x, mph=None, mpd=1, threshold=0, edge='rising', - kpsh=False, valley=False, show=False, ax=None): - - """Detect peaks in data based on their amplitude and other features. - - Parameters - ---------- - x : 1D array_like - data. 
- mph : {None, number}, optional (default = None) - detect peaks that are greater than minimum peak height. - mpd : positive integer, optional (default = 1) - detect peaks that are at least separated by minimum peak distance (in - number of data). - threshold : positive number, optional (default = 0) - detect peaks (valleys) that are greater (smaller) than `threshold` - in relation to their immediate neighbors. - edge : {None, 'rising', 'falling', 'both'}, optional (default = 'rising') - for a flat peak, keep only the rising edge ('rising'), only the - falling edge ('falling'), both edges ('both'), or don't detect a - flat peak (None). - kpsh : bool, optional (default = False) - keep peaks with same height even if they are closer than `mpd`. - valley : bool, optional (default = False) - if True (1), detect valleys (local minima) instead of peaks. - show : bool, optional (default = False) - if True (1), plot data in matplotlib figure. - ax : a matplotlib.axes.Axes instance, optional (default = None). - - Returns - ------- - ind : 1D array_like - indeces of the peaks in `x`. - - Notes - ----- - The detection of valleys instead of peaks is performed internally by simply - negating the data: `ind_valleys = detect_peaks(-x)` - - The function can handle NaN's - - See this IPython Notebook [1]_. - - References - ---------- - .. 
[1] http://nbviewer.ipython.org/github/demotu/BMC/blob/master/notebooks/DetectPeaks.ipynb - - Examples - -------- - >>> from detect_peaks import detect_peaks - >>> x = np.random.randn(100) - >>> x[60:81] = np.nan - >>> # detect all peaks and plot data - >>> ind = detect_peaks(x, show=True) - >>> print(ind) - - >>> x = np.sin(2*np.pi*5*np.linspace(0, 1, 200)) + np.random.randn(200)/5 - >>> # set minimum peak height = 0 and minimum peak distance = 20 - >>> detect_peaks(x, mph=0, mpd=20, show=True) - - >>> x = [0, 1, 0, 2, 0, 3, 0, 2, 0, 1, 0] - >>> # set minimum peak distance = 2 - >>> detect_peaks(x, mpd=2, show=True) - - >>> x = np.sin(2*np.pi*5*np.linspace(0, 1, 200)) + np.random.randn(200)/5 - >>> # detection of valleys instead of peaks - >>> detect_peaks(x, mph=0, mpd=20, valley=True, show=True) - - >>> x = [0, 1, 1, 0, 1, 1, 0] - >>> # detect both edges - >>> detect_peaks(x, edge='both', show=True) - - >>> x = [-2, 1, -2, 2, 1, 1, 3, 0] - >>> # set threshold = 2 - >>> detect_peaks(x, threshold = 2, show=True) - """ - - x = np.atleast_1d(x).astype('float64') - if x.size < 3: - return np.array([], dtype=int) - if valley: - x = -x - # find indices of all peaks - dx = x[1:] - x[:-1] - # handle NaN's - indnan = np.where(np.isnan(x))[0] - if indnan.size: - x[indnan] = np.inf - dx[np.where(np.isnan(dx))[0]] = np.inf - ine, ire, ife = np.array([[], [], []], dtype=int) - if not edge: - ine = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) > 0))[0] - else: - if edge.lower() in ['rising', 'both']: - ire = np.where((np.hstack((dx, 0)) <= 0) & (np.hstack((0, dx)) > 0))[0] - if edge.lower() in ['falling', 'both']: - ife = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0] - ind = np.unique(np.hstack((ine, ire, ife))) - # handle NaN's - if ind.size and indnan.size: - # NaN's and values close to NaN's cannot be peaks - ind = ind[np.in1d(ind, np.unique(np.hstack((indnan, indnan-1, indnan+1))), invert=True)] - # first and last values of x cannot be peaks - 
if ind.size and ind[0] == 0: - ind = ind[1:] - if ind.size and ind[-1] == x.size-1: - ind = ind[:-1] - # remove peaks < minimum peak height - if ind.size and mph is not None: - ind = ind[x[ind] >= mph] - # remove peaks - neighbors < threshold - if ind.size and threshold > 0: - dx = np.min(np.vstack([x[ind]-x[ind-1], x[ind]-x[ind+1]]), axis=0) - ind = np.delete(ind, np.where(dx < threshold)[0]) - # detect small peaks closer than minimum peak distance - if ind.size and mpd > 1: - ind = ind[np.argsort(x[ind])][::-1] # sort ind by peak height - idel = np.zeros(ind.size, dtype=bool) - for i in range(ind.size): - if not idel[i]: - # keep peaks with the same height if kpsh is True - idel = idel | (ind >= ind[i] - mpd) & (ind <= ind[i] + mpd) \ - & (x[ind[i]] > x[ind] if kpsh else True) - idel[i] = 0 # Keep current peak - # remove the small peaks and sort back the indices by their occurrence - ind = np.sort(ind[~idel]) - - if show: - if indnan.size: - x[indnan] = np.nan - if valley: - x = -x - peak_plot(x, mph, mpd, threshold, edge, valley, ax, ind) - - return ind - - -def peak_plot(x, mph, mpd, threshold, edge, valley, ax, ind): - """Plot results of the detect_peaks function, see its help.""" - try: - import matplotlib.pyplot as plt - except ImportError: - print('matplotlib is not available.') - else: - if ax is None: - _, ax = plt.subplots(1, 1, figsize=(8, 4)) - - ax.plot(x, 'b', lw=1) - if ind.size: - label = 'valley' if valley else 'peak' - label = label + 's' if ind.size > 1 else label - ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8, - label='%d %s' % (ind.size, label)) - ax.legend(loc='best', framealpha=.5, numpoints=1) - ax.set_xlim(-.02*x.size, x.size*1.02-1) - ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max() - yrange = ymax - ymin if ymax > ymin else 1 - ax.set_ylim(ymin - 0.1*yrange, ymax + 0.1*yrange) - ax.set_xlabel('Data #', fontsize=14) - ax.set_ylabel('Amplitude', fontsize=14) - mode = 'Valley detection' if valley else 'Peak 
detection' - ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')" - % (mode, str(mph), mpd, str(threshold), edge)) - # plt.grid() - plt.show() \ No newline at end of file diff --git a/auto_rx/gps_grabber.py b/auto_rx/gps_grabber.py deleted file mode 100644 index 1f6650d..0000000 --- a/auto_rx/gps_grabber.py +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Tools - GPS Ephemeris / Almanac Grabber -# -# 2017-04 Mark Jessop -# -import ftplib -import requests -import datetime -import logging -import os - -def get_ephemeris(destination="ephemeris.dat"): - ''' Download the latest GPS ephemeris file from the CDDIS's FTP server ''' - try: - logging.info("Connecting to GSFC FTP Server...") - ftp = ftplib.FTP("cddis.gsfc.nasa.gov", timeout=10) - ftp.login("anonymous","anonymous") - ftp.cwd("gnss/data/daily/%s/brdc/" % datetime.datetime.utcnow().strftime("%Y")) - file_list= ftp.nlst() - - # We expect the latest files to be the last in the list. - download_file = None - file_suffix = datetime.datetime.utcnow().strftime("%yn.Z") - - if file_suffix in file_list[-1]: - download_file = file_list[-1] - elif file_suffix in file_list[-2]: - download_file = file_list[-2] - else: - logging.error("Could not find appropriate ephemeris file.") - return None - - logging.info("Downloading ephemeris data file: %s" % download_file) - - # Download file. - f_eph = open(destination+".Z",'w') - ftp.retrbinary("RETR %s" % download_file, f_eph.write) - f_eph.close() - ftp.close() - - # Unzip file. - os.system("gunzip -q -f ./%s" % (destination+".Z")) - - logging.info("Ephemeris downloaded to %s successfuly!" % destination) - - return destination - except: - logging.error("Could not download ephemeris file.") - return None - -def get_almanac(destination="almanac.txt", timeout=20): - ''' Download the latest GPS almanac file from the US Coast Guard website. 
''' - try: - _r = requests.get("https://www.navcen.uscg.gov/?pageName=currentAlmanac&format=sem", timeout=timeout) - data = _r.text - if "CURRENT.ALM" in data: - f = open(destination,'wb') - f.write(data) - f.close() - logging.info("Almanac downloaded to %s successfuly!" % destination) - return destination - else: - logging.error("Downloaded file is not a GPS almanac.") - return None - except: - logging.error("Failed to download almanac data") - return None - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - get_almanac() - get_ephemeris() - - - diff --git a/auto_rx/habitat_utils.py b/auto_rx/habitat_utils.py deleted file mode 100644 index 5ab22da..0000000 --- a/auto_rx/habitat_utils.py +++ /dev/null @@ -1,480 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Tools - Habitat Upload -# -# 2018-04 Mark Jessop -# -import crcmod -import datetime -import logging -import Queue -import random -import requests -import time -import traceback -import json -from base64 import b64encode -from hashlib import sha256 -from threading import Thread - -# -# Habitat Uploader Class -# - -class HabitatUploader(object): - ''' - Queued Habitat Telemetry Uploader class - - Packets to be uploaded to Habitat are added to a queue for uploading. - If an upload attempt times out, the packet is discarded. - If the queue fills up (probably indicating no network connection, and a fast packet downlink rate), - it is immediately emptied, to avoid upload of out-of-date packets. - ''' - - - def __init__(self, user_callsign='N0CALL', - queue_size=16, - upload_timeout = 10, - upload_retries = 5, - upload_retry_interval = 0.25, - inhibit = False, - ): - ''' Create a Habitat Uploader object. 
''' - - self.user_callsign = user_callsign - self.upload_timeout = upload_timeout - self.upload_retries = upload_retries - self.upload_retry_interval = upload_retry_interval - self.queue_size = queue_size - self.habitat_upload_queue = Queue.Queue(queue_size) - self.inhibit = inhibit - - # Start the uploader thread. - self.habitat_uploader_running = True - self.uploadthread = Thread(target=self.habitat_upload_thread) - self.uploadthread.start() - - def habitat_upload(self, sentence): - ''' Upload a UKHAS-standard telemetry sentence to Habitat ''' - - # Generate payload to be uploaded - _sentence_b64 = b64encode(sentence) - _date = datetime.datetime.utcnow().isoformat("T") + "Z" - _user_call = self.user_callsign - - _data = { - "type": "payload_telemetry", - "data": { - "_raw": _sentence_b64 - }, - "receivers": { - _user_call: { - "time_created": _date, - "time_uploaded": _date, - }, - }, - } - - # The URL to upload to. - _url = "http://habitat.habhub.org/habitat/_design/payload_telemetry/_update/add_listener/%s" % sha256(_sentence_b64).hexdigest() - - # Delay for a random amount of time between 0 and upload_retry_interval*2 seconds. - time.sleep(random.random()*self.upload_retry_interval*2.0) - - _retries = 0 - - # When uploading, we have three possible outcomes: - # - Can't connect. No point immediately re-trying in this situation. - # - The packet is uploaded successfuly (201 / 403) - # - There is a upload conflict on the Habitat DB end (409). We can retry and it might work. - while _retries < self.upload_retries: - # Run the request. - try: - _req = requests.put(_url, data=json.dumps(_data), timeout=self.upload_timeout) - except Exception as e: - logging.error("Habitat - Upload Failed: %s" % str(e)) - break - - if _req.status_code == 201 or _req.status_code == 403: - # 201 = Success, 403 = Success, sentence has already seen by others. 
- logging.info("Habitat - Uploaded sentence to Habitat successfully") - _upload_success = True - break - elif _req.status_code == 409: - # 409 = Upload conflict (server busy). Sleep for a moment, then retry. - logging.debug("Habitat - Upload conflict.. retrying.") - time.sleep(random.random()*self.upload_retry_interval) - _retries += 1 - else: - logging.error("Habitat - Error uploading to Habitat. Status Code: %d." % _req.status_code) - break - - if _retries == self.upload_retries: - logging.error("Habitat - Upload conflict not resolved with %d retries." % self.upload_retries) - - return - - - def habitat_upload_thread(self): - ''' Handle uploading of packets to Habitat ''' - - logging.info("Started Habitat Uploader Thread.") - - while self.habitat_uploader_running: - - if self.habitat_upload_queue.qsize() > 0: - # If the queue is completely full, jump to the most recent telemetry sentence. - if self.habitat_upload_queue.qsize() == self.queue_size: - while not self.habitat_upload_queue.empty(): - sentence = self.habitat_upload_queue.get() - - logging.warning("Habitat uploader queue was full - possible connectivity issue.") - else: - # Otherwise, get the first item in the queue. - sentence = self.habitat_upload_queue.get() - - # Attempt to upload it. - self.habitat_upload(sentence) - - else: - # Wait for a short time before checking the queue again. - time.sleep(0.1) - - logging.info("Stopped Habitat Uploader Thread.") - - - def add(self, sentence): - ''' Add a sentence to the upload queue ''' - - if self.inhibit: - # We have upload inhibited. Return. - return - - # Handling of arbitrary numbers of $$'s at the start of a sentence: - # Extract the data part of the sentence (i.e. everything after the $$'s') - sentence = sentence.split('$')[-1] - # Now add the *correct* number of $$s back on. 
- sentence = '$$' +sentence - - if not (sentence[-1] == '\n'): - sentence += '\n' - - try: - self.habitat_upload_queue.put_nowait(sentence) - except Queue.Full: - logging.error("Upload Queue is full, sentence discarded.") - except Exception as e: - logging.error("Error adding sentence to queue: %s" % str(e)) - - - def close(self): - ''' Shutdown uploader thread. ''' - self.habitat_uploader_running = False - - -# -# Functions for uploading telemetry to Habitat -# - - -# CRC16 function -def crc16_ccitt(data): - """ - Calculate the CRC16 CCITT checksum of *data*. - (CRC16 CCITT: start 0xFFFF, poly 0x1021) - """ - crc16 = crcmod.predefined.mkCrcFun('crc-ccitt-false') - return hex(crc16(data))[2:].upper().zfill(4) - - -def telemetry_to_sentence(sonde_data, payload_callsign="RADIOSONDE", comment=None): - ''' Convert a telemetry data dictionary into a UKHAS-compliant telemetry sentence ''' - # RS produces timestamps with microseconds on the end, we only want HH:MM:SS for uploading to habitat. - data_datetime = datetime.datetime.strptime(sonde_data['datetime_str'],"%Y-%m-%dT%H:%M:%S.%f") - short_time = data_datetime.strftime("%H:%M:%S") - - sentence = "$$%s,%d,%s,%.5f,%.5f,%d,%.1f,%.1f,%.1f" % (payload_callsign,sonde_data['frame'],short_time,sonde_data['lat'], - sonde_data['lon'],int(sonde_data['alt']),sonde_data['vel_h'], sonde_data['temp'], sonde_data['humidity']) - - # Add on a comment field if provided - note that this will result in a different habitat payload doc being required. - if comment != None: - comment = comment.replace(',','_') - sentence += "," + comment - - checksum = crc16_ccitt(sentence[2:]) - output = sentence + "*" + checksum + "\n" - return output - - -def habitat_upload_payload_telemetry(uploader, telemetry, payload_callsign = "RADIOSONDE", callsign="N0CALL", comment=None): - ''' Add a packet of radiosonde telemetry to the Habitat uploader queue. 
''' - - sentence = telemetry_to_sentence(telemetry, payload_callsign = payload_callsign, comment=comment) - - try: - uploader.add(sentence) - except Exception as e: - logging.error("Could not add telemetry to Habitat Uploader - %s" % str(e)) - -# -# Functions for uploading a listener position to Habitat. -# from https://raw.githubusercontent.com/rossengeorgiev/hab-tools/master/spot2habitat_chase.py -# -callsign_init = False -url_habitat_uuids = "http://habitat.habhub.org/_uuids?count=%d" -url_habitat_db = "http://habitat.habhub.org/habitat/" -url_check_callsign = "http://spacenear.us/tracker/datanew.php?mode=6hours&type=positions&format=json&max_positions=10&position_id=0&vehicle=%s" -uuids = [] - - -def check_callsign(callsign, timeout=10): - ''' - Check if a payload document exists for a given callsign. - - This is done in a bit of a hack-ish way at the moment. We just check to see if there have - been any reported packets for the payload callsign on the tracker. - This should really be replaced with the correct call into the habitat tracker. - ''' - global url_check_callsign - - # Perform the request - _r = requests.get(url_check_callsign % callsign, timeout=timeout) - - try: - # Read the response in as JSON - _r_json = _r.json() - - # Read out the list of positions for the requested callsign - _positions = _r_json['positions']['position'] - - # If there is at least one position returned, we assume there is a valid payload document. - if len(_positions) > 0: - logging.info("Callsign %s already present in Habitat DB, not creating new payload doc." % callsign) - return True - else: - # Otherwise, we don't, and go create one. - return False - - except Exception as e: - # Handle errors with JSON parsing. 
- logging.error("Unable to request payload positions from spacenear.us - %s" % str(e)) - return False - - - -# Keep an internal cache for which payload docs we've created so we don't spam couchdb with updates -payload_config_cache = {} - - -def ISOStringNow(): - return "%sZ" % datetime.datetime.utcnow().isoformat() - - -def initPayloadDoc(serial, description="Meteorology Radiosonde", frequency=401500000, timeout=20): - """Creates a payload in Habitat for the radiosonde before uploading""" - global url_habitat_db - global payload_config_cache - - # First, check if the payload's serial number is already in our local cache. - if serial in payload_config_cache: - return payload_config_cache[serial] - - # Next, check to see if the payload has been observed on the online tracker already. - _callsign_present = check_callsign(serial) - - if _callsign_present: - # Add the callsign to the local cache. - payload_config_cache[serial] = serial - return - - # Otherwise, proceed to creating a new payload document. - - payload_data = { - "type": "payload_configuration", - "name": serial, - "time_created": ISOStringNow(), - "metadata": { - "description": description - }, - "transmissions": [ - { - "frequency": frequency, # Currently a dummy value. 
- "modulation": "RTTY", - "mode": "USB", - "encoding": "ASCII-8", - "parity": "none", - "stop": 2, - "shift": 350, - "baud": 50, - "description": "DUMMY ENTRY, DATA IS VIA radiosonde_auto_rx" - } - ], - "sentences": [ - { - "protocol": "UKHAS", - "callsign": serial, - "checksum":"crc16-ccitt", - "fields":[ - { - "name": "sentence_id", - "sensor": "base.ascii_int" - }, - { - "name": "time", - "sensor": "stdtelem.time" - }, - { - "name": "latitude", - "sensor": "stdtelem.coordinate", - "format": "dd.dddd" - }, - { - "name": "longitude", - "sensor": "stdtelem.coordinate", - "format": "dd.dddd" - }, - { - "name": "altitude", - "sensor": "base.ascii_int" - }, - { - "name": "speed", - "sensor": "base.ascii_float" - }, - { - "name": "temperature_external", - "sensor": "base.ascii_float" - }, - { - "name": "humidity", - "sensor": "base.ascii_float" - }, - { - "name": "comment", - "sensor": "base.string" - } - ], - "filters": - { - "post": [ - { - "filter": "common.invalid_location_zero", - "type": "normal" - } - ] - }, - "description": "radiosonde_auto_rx to Habitat Bridge" - } - ] - } - - # Perform the POST request to the Habitat DB. - try: - _r = requests.post(url_habitat_db, json=payload_data, timeout=timeout) - - if _r.json()['ok'] is True: - logging.info("Habitat - Created a payload document for %s" % serial) - payload_config_cache[serial] = _r.json() - else: - logging.error("Habitat - Failed to create a payload document for %s" % serial) - - except Exception as e: - logging.error("Habitat - Failed to create a payload document for %s - %s" % (serial, str(e))) - - - -def postListenerData(doc, timeout=10): - global uuids, url_habitat_db - # do we have at least one uuid, if not go get more - if len(uuids) < 1: - fetchUuids() - - # Attempt to add UUID and time data to document. 
- try: - doc['_id'] = uuids.pop() - except IndexError: - logging.error("Habitat - Unable to post listener data - no UUIDs available.") - return False - - doc['time_uploaded'] = ISOStringNow() - - try: - _r = requests.post(url_habitat_db, json=doc, timeout=timeout) - return True - except Exception as e: - logging.error("Habitat - Could not post listener data - %s" % str(e)) - return False - - -def fetchUuids(timeout=10): - global uuids, url_habitat_uuids - - _retries = 5 - - while _retries > 0: - try: - _r = requests.get(url_habitat_uuids % 10, timeout=timeout) - uuids.extend(_r.json()['uuids']) - logging.debug("Habitat - Got UUIDs") - return - except Exception as e: - logging.error("Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e)) - time.sleep(10) - _retries = _retries - 1 - continue - - logging.error("Habitat - Gave up trying to get UUIDs.") - return - - -def initListenerCallsign(callsign, version=''): - doc = { - 'type': 'listener_information', - 'time_created' : ISOStringNow(), - 'data': { - 'callsign': callsign, - 'antenna': '', - 'radio': 'radiosonde_auto_rx %s' % version, - } - } - - resp = postListenerData(doc) - - if resp is True: - logging.debug("Habitat - Listener Callsign Initialized.") - return True - else: - logging.error("Habitat - Unable to initialize callsign.") - return False - - -def uploadListenerPosition(callsign, lat, lon, version=''): - """ Initializer Listener Callsign, and upload Listener Position """ - - # Attempt to initialize the listeners callsign - resp = initListenerCallsign(callsign, version=version) - # If this fails, it means we can't contact the Habitat server, - # so there is no point continuing. 
- if resp is False: - return - - doc = { - 'type': 'listener_telemetry', - 'time_created': ISOStringNow(), - 'data': { - 'callsign': callsign, - 'chase': False, - 'latitude': lat, - 'longitude': lon, - 'altitude': 0, - 'speed': 0, - } - } - - # post position to habitat - resp = postListenerData(doc) - if resp is True: - logging.info("Habitat - Listener information uploaded.") - else: - logging.error("Habitat - Unable to upload listener information.") - diff --git a/auto_rx/horizon_calc.py b/auto_rx/horizon_calc.py deleted file mode 100644 index 5acceb0..0000000 --- a/auto_rx/horizon_calc.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Tools -# Process last_position.txt and determine effective radio horizon -# -# 2017-05 Mark Jessop -# - -from math import radians, degrees, sin, cos, atan2, sqrt, pi -import sys -import numpy as np -import matplotlib.pyplot as plt - -# SET YOUR LOCATION HERE. -my_lat = 0.0 -my_lon = 0.0 -my_alt = 0.0 - -# Earthmaths code by Daniel Richman (thanks!) -# Copyright 2012 (C) Daniel Richman; GNU GPL 3 -def position_info(listener, balloon): - """ - Calculate and return information from 2 (lat, lon, alt) tuples - - Returns a dict with: - - - angle at centre - - great circle distance - - distance in a straight line - - bearing (azimuth or initial course) - - elevation (altitude) - - Input and output latitudes, longitudes, angles, bearings and elevations are - in degrees, and input altitudes and output distances are in meters. - """ - - # Earth: - #radius = 6371000.0 - radius = 6364963.0 # Optimized for Australia :-) - - (lat1, lon1, alt1) = listener - (lat2, lon2, alt2) = balloon - - lat1 = radians(lat1) - lat2 = radians(lat2) - lon1 = radians(lon1) - lon2 = radians(lon2) - - # Calculate the bearing, the angle at the centre, and the great circle - # distance using Vincenty's_formulae with f = 0 (a sphere). 
See - # http://en.wikipedia.org/wiki/Great_circle_distance#Formulas and - # http://en.wikipedia.org/wiki/Great-circle_navigation and - # http://en.wikipedia.org/wiki/Vincenty%27s_formulae - d_lon = lon2 - lon1 - sa = cos(lat2) * sin(d_lon) - sb = (cos(lat1) * sin(lat2)) - (sin(lat1) * cos(lat2) * cos(d_lon)) - bearing = atan2(sa, sb) - aa = sqrt((sa ** 2) + (sb ** 2)) - ab = (sin(lat1) * sin(lat2)) + (cos(lat1) * cos(lat2) * cos(d_lon)) - angle_at_centre = atan2(aa, ab) - great_circle_distance = angle_at_centre * radius - - # Armed with the angle at the centre, calculating the remaining items - # is a simple 2D triangley circley problem: - - # Use the triangle with sides (r + alt1), (r + alt2), distance in a - # straight line. The angle between (r + alt1) and (r + alt2) is the - # angle at the centre. The angle between distance in a straight line and - # (r + alt1) is the elevation plus pi/2. - - # Use sum of angle in a triangle to express the third angle in terms - # of the other two. Use sine rule on sides (r + alt1) and (r + alt2), - # expand with compound angle formulae and solve for tan elevation by - # dividing both sides by cos elevation - ta = radius + alt1 - tb = radius + alt2 - ea = (cos(angle_at_centre) * tb) - ta - eb = sin(angle_at_centre) * tb - elevation = atan2(ea, eb) - - # Use cosine rule to find unknown side. 
- distance = sqrt((ta ** 2) + (tb ** 2) - 2 * tb * ta * cos(angle_at_centre)) - - # Give a bearing in range 0 <= b < 2pi - if bearing < 0: - bearing += 2 * pi - - return { - "listener": listener, "balloon": balloon, - "listener_radians": (lat1, lon1, alt1), - "balloon_radians": (lat2, lon2, alt2), - "angle_at_centre": degrees(angle_at_centre), - "angle_at_centre_radians": angle_at_centre, - "bearing": degrees(bearing), - "bearing_radians": bearing, - "great_circle_distance": great_circle_distance, - "straight_distance": distance, - "elevation": degrees(elevation), - "elevation_radians": elevation - } - -if __name__ == '__main__': - # Read in last_position.txt line by line. - f = open('last_positions.txt','r') - - azimuths = [] - elevations = [] - slant_ranges = [] - - for line in f: - if 'Last Position:' in line: - try: - last_lat = float(line.split(',')[0].split(' ')[2]) - last_lon = float(line.split(',')[1]) - last_alt = float(line.split(',')[2].split(' ')[1]) - - pos_data = position_info( (my_lat, my_lon, my_alt), (last_lat, last_lon, last_alt)) - - azimuths.append(pos_data['bearing']) - elevations.append(pos_data['elevation']) - slant_ranges.append(pos_data['straight_distance']) - except: - pass - - f.close() - - # Plot - plt.scatter(azimuths, elevations) - plt.xlabel('Bearing (degrees)') - plt.ylabel('Elevation (degrees)') - plt.show() - - diff --git a/auto_rx/ozi_utils.py b/auto_rx/ozi_utils.py deleted file mode 100644 index 5ad32aa..0000000 --- a/auto_rx/ozi_utils.py +++ /dev/null @@ -1,113 +0,0 @@ -# OziPlotter push utils for Sonde auto RX. - -import socket -import json - - -# Network Settings -HORUS_UDP_PORT = 55672 -HORUS_OZIPLOTTER_PORT = 8942 - -def send_payload_summary(callsign, latitude, longitude, altitude, packet_time, speed=-1, heading=-1, comment= '', model='', freq=401.0, temp=-273, udp_port = HORUS_UDP_PORT): - """ - Send an update on the core payload telemetry statistics into the network via UDP broadcast. 
- This can be used by other devices hanging off the network to display vital stats about the payload. - """ - packet = { - 'type' : 'PAYLOAD_SUMMARY', - 'callsign' : callsign, - 'latitude' : latitude, - 'longitude' : longitude, - 'altitude' : altitude, - 'speed' : speed, - 'heading': heading, - 'time' : packet_time, - 'comment' : comment, - # Additional fields specifically for radiosondes - 'model': model, - 'freq': freq, - 'temp': temp - } - - # Set up our UDP socket - s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM) - s.settimeout(1) - # Set up socket for broadcast, and allow re-use of the address - s.setsockopt(socket.SOL_SOCKET,socket.SO_BROADCAST,1) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - try: - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except: - pass - s.bind(('',HORUS_UDP_PORT)) - try: - s.sendto(json.dumps(packet), ('', udp_port)) - except socket.error: - s.sendto(json.dumps(packet), ('127.0.0.1', udp_port)) - - -# The new 'generic' OziPlotter upload function, with no callsign, or checksumming (why bother, really) -def oziplotter_upload_basic_telemetry(time, latitude, longitude, altitude, hostname="192.168.88.2", udp_port = HORUS_OZIPLOTTER_PORT, broadcast=True): - """ - Send a sentence of position data to Oziplotter, via UDP. - """ - sentence = "TELEMETRY,%s,%.5f,%.5f,%d\n" % (time, latitude, longitude, altitude) - - try: - ozisock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM) - if broadcast: - # Set up socket for broadcast, and allow re-use of the address - ozisock.setsockopt(socket.SOL_SOCKET,socket.SO_BROADCAST,1) - ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - try: - ozisock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except: - pass - ozisock.sendto(sentence,('',udp_port)) - else: - # Otherwise, send to a user-defined hostname/port. 
- ozisock.sendto(sentence,(hostname,udp_port)) - - ozisock.close() - return sentence - except Exception as e: - print("Failed to send to Ozi: %s" % str(e)) - - - -def push_telemetry_to_ozi(telemetry, hostname='127.0.0.1', udp_port = HORUS_OZIPLOTTER_PORT): - """ - Grab the relevant fields from the incoming telemetry dictionary, and pass onto oziplotter. - """ - oziplotter_upload_basic_telemetry(telemetry['short_time'], telemetry['lat'], telemetry['lon'], telemetry['alt'], hostname=hostname, udp_port=udp_port) - - -def push_payload_summary(telemetry, udp_port = HORUS_UDP_PORT): - """ - Extract the needed data from a telemetry dictionary, and send out a payload summary packet. - """ - - # Prepare heading & speed fields, if they are provided in the incoming telemetry blob. - if 'heading' in telemetry.keys(): - _heading = telemetry['heading'] - else: - _heading = -1 - - if 'vel_h' in telemetry.keys(): - _speed = telemetry['vel_h']*3.6 - else: - _speed = -1 - - send_payload_summary(telemetry['id'], - telemetry['lat'], - telemetry['lon'], - telemetry['alt'], - telemetry['short_time'], - heading=_heading, - speed=_speed, - model=telemetry['type'], - freq=telemetry['freq'], - temp=telemetry['temp'], - comment='radiosonde', - udp_port=udp_port) - diff --git a/auto_rx/rotator_utils.py b/auto_rx/rotator_utils.py deleted file mode 100644 index bbf604e..0000000 --- a/auto_rx/rotator_utils.py +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env python -# -# Radiosonde Auto RX Tools - Rotator Control -# -# 2017-12 Mark Jessop -# -import socket -import logging -import traceback -import time -import numpy as np -from math import radians, degrees, sin, cos, atan2, sqrt, pi - -# Earthmaths code by Daniel Richman (thanks!) 
-# Copyright 2012 (C) Daniel Richman; GNU GPL 3 -def position_info(listener, balloon): - """ - Calculate and return information from 2 (lat, lon, alt) tuples - - Returns a dict with: - - - angle at centre - - great circle distance - - distance in a straight line - - bearing (azimuth or initial course) - - elevation (altitude) - - Input and output latitudes, longitudes, angles, bearings and elevations are - in degrees, and input altitudes and output distances are in meters. - """ - - # Earth: - radius = 6371000.0 - - (lat1, lon1, alt1) = listener - (lat2, lon2, alt2) = balloon - - lat1 = radians(lat1) - lat2 = radians(lat2) - lon1 = radians(lon1) - lon2 = radians(lon2) - - # Calculate the bearing, the angle at the centre, and the great circle - # distance using Vincenty's_formulae with f = 0 (a sphere). See - # http://en.wikipedia.org/wiki/Great_circle_distance#Formulas and - # http://en.wikipedia.org/wiki/Great-circle_navigation and - # http://en.wikipedia.org/wiki/Vincenty%27s_formulae - d_lon = lon2 - lon1 - sa = cos(lat2) * sin(d_lon) - sb = (cos(lat1) * sin(lat2)) - (sin(lat1) * cos(lat2) * cos(d_lon)) - bearing = atan2(sa, sb) - aa = sqrt((sa ** 2) + (sb ** 2)) - ab = (sin(lat1) * sin(lat2)) + (cos(lat1) * cos(lat2) * cos(d_lon)) - angle_at_centre = atan2(aa, ab) - great_circle_distance = angle_at_centre * radius - - # Armed with the angle at the centre, calculating the remaining items - # is a simple 2D triangley circley problem: - - # Use the triangle with sides (r + alt1), (r + alt2), distance in a - # straight line. The angle between (r + alt1) and (r + alt2) is the - # angle at the centre. The angle between distance in a straight line and - # (r + alt1) is the elevation plus pi/2. - - # Use sum of angle in a triangle to express the third angle in terms - # of the other two. 
Use sine rule on sides (r + alt1) and (r + alt2), - # expand with compound angle formulae and solve for tan elevation by - # dividing both sides by cos elevation - ta = radius + alt1 - tb = radius + alt2 - ea = (cos(angle_at_centre) * tb) - ta - eb = sin(angle_at_centre) * tb - elevation = atan2(ea, eb) - - # Use cosine rule to find unknown side. - distance = sqrt((ta ** 2) + (tb ** 2) - 2 * tb * ta * cos(angle_at_centre)) - - # Give a bearing in range 0 <= b < 2pi - if bearing < 0: - bearing += 2 * pi - - return { - "listener": listener, "balloon": balloon, - "listener_radians": (lat1, lon1, alt1), - "balloon_radians": (lat2, lon2, alt2), - "angle_at_centre": degrees(angle_at_centre), - "angle_at_centre_radians": angle_at_centre, - "bearing": degrees(bearing), - "bearing_radians": bearing, - "great_circle_distance": great_circle_distance, - "straight_distance": distance, - "elevation": degrees(elevation), - "elevation_radians": elevation - } - -def update_rotctld(hostname='127.0.0.1', port=4533, azimuth=0.0, elevation=0.0): - ''' - Attempt to push an azimuth & elevation position command into rotctld. - We take a fairly simplistic approach to this, and don't attempt to read the current - rotator position. - ''' - - # Bound Azimuth & Elevation to 0-360 / 0-90 - elevation = np.clip(elevation,0,90) - azimuth = azimuth % 360.0 - - try: - # Connect to rotctld. - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.settimeout(1) - s.connect((hostname,port)) - - # Produce rotctld command. - msg = "P %.1f %.1f\n" % (azimuth, elevation) - logging.debug("Rotctld - Sending command: %s" % msg) - # Send. - s.send(msg) - # Listen for a response. - resp = s.recv(1024) - # Close socket. - s.close() - - # - if 'RPRT 0' in resp: - logging.info("Rotctld - Commanded rotator to %.1f, %.1f." 
% (azimuth, elevation)) - return True - elif 'RPRT -1' in resp: - logging.warning("Rotctld - rotctld reported an error (RPRT -1).") - return False - else: - logging.warning("Rotctld - Unknown or no response from rotctld.") - return False - except: - logging.error("Rotctld - Connection Error: %s" % traceback.format_exc()) - -if __name__ == "__main__": - # Test script, to poke some values into rotctld. - logging.basicConfig(level=logging.DEBUG) - - az_range = np.linspace(0,360,10) - el_range = np.linspace(0,90,10) - - for i in range(0,len(az_range)): - update_rotctld(azimuth=az_range[i], elevation=el_range[i]) - time.sleep(10) - diff --git a/auto_rx/station.cfg.example b/auto_rx/station.cfg.example index 503ad29..5dd59dd 100644 --- a/auto_rx/station.cfg.example +++ b/auto_rx/station.cfg.example @@ -1,5 +1,5 @@ # -# Radiosonde Auto RX Station Configuration File +# Radiosonde Auto RX v2 Station Configuration File # # Copy this file to station.cfg and modify as required. # @@ -15,43 +15,51 @@ per_sonde_log = True sdr_fm_path = rtl_fm sdr_power_path = rtl_power -# PPM Frequency Correction (ppm offset) -# Refer here for a method of determining this correction: https://gist.github.com/darksidelemm/b517e6a9b821c50c170f1b9b7d65b824 -sdr_ppm = 0 +# Number of RTLSDRs to use. +# If more than one RTLSDR is in use, multiple [sdr_X] sections must be populated below +sdr_quantity = 1 + +# Individual SDR Settings. +[sdr_1] +# Device Index / Serial +# If using a single RTLSDR, set this value to 0 +# If using multiple SDRs, you MUST allocate each SDR a unique serial number using rtl_eeprom +# i.e. 
to set the serial number of a (single) connected RTLSDR: rtl_eeprom -s 00000002 +# Then set the device_idx below to 00000002, and repeat for the other [sdr_n] sections below +device_idx = 0 + +# Frequency Correction (ppm offset) +# Refer here for a method of determining this correction: https://gist.github.com/darksidelemm/b517e6a9b821c50c170f1b9b7d65b824 +ppm = 0 + # SDR Gain Setting -# Gain settings can generally range between 0dB and 30dB depending on the tuner in use. +# Gain settings can generally range between 0dB and 40dB depending on the tuner in use. # Run rtl_test to confirm what gain settings are available, or use a value of -1 to use automatic gain control. # Note that this is an overall gain value, not an individual mixer/tuner gain. This is a limitation of the rtl_power/rtl_fm utils. -sdr_gain = -1 -# Enable RTLSDR Bias Tee (for v3 Dongles) -# Requires a recent version of rtl-sdr to be installed (needs the -T option) -sdr_bias = False +gain = -1 + +# Bias Tee - Enable the bias tee in the RTLSDR v3 Dongles. +bias = False + +[sdr_2] +# As above, for the next SDR. Note the warning about serial numbers. +device_idx = 00000002 +ppm = 0 +gain = -1 +bias = False + +# Add more SDR definitions here if needed. + # Radiosonde Search Parameters [search_params] -# Number of times to scan before quitting (Deprecated?) -search_attempts = 10 -# Wait time between failed scans. -search_delay = 10 - # Minimum and maximum search frequencies, in MHz. # Australia: Use 400.05 - 403 MHz # Europe: Use 400.05 - 406 MHz min_freq = 400.05 max_freq = 403.0 - -# Receive bin width (Hz) -search_step = 800 -# Minimum SNR for a peak to be detected. The lower the number, the more peaks detected. -min_snr = 10 -# Minimum distance between peaks (Hz) -min_distance = 1000 -# Dwell time - How long to wait for a sonde detection on each peak. -dwell_time = 5 -# Quantize search results to x Hz steps. Useful as most sondes are on 10 kHz frequency steps. 
-quantization = 10000 -# Timeout and re-scan after X seconds of no data. -rx_timeout = 120 +# Have the decoder timeout after X seconds of no valid data. +rx_timeout = 180 # Frequency Lists - These must be provided as JSON-compatible lists of floats (in MHz), i.e. [400.50, 401.520, 403.200] @@ -67,50 +75,6 @@ blacklist = [] # This is useful when you know the regular frequency of a local sonde, but still want to allow detections on other frequencies. greylist = [] -# Station Location (optional). Used by the Habitat Uploader, and by Rotator Control -[location] -station_lat = 0.0 -station_lon = 0.0 -station_alt = 0.0 - -# Upload settings. Used by both the internet upload threads, and the rotator updater. -[upload] -# Upload/update every x seconds -upload_rate = 30 - -# Enable upload to various services. -# Uploading to APRS. Change settings in [aprs] block below. -# PLEASE READ WARNING HERE BEFORE ENABLING: https://github.com/projecthorus/radiosonde_auto_rx/wiki/Configuration-Settings#uploading-to-aprs-is -enable_aprs = False -# Uploading to Habitat. PLEASE CHANGE uploader_callsign IN [habitat] BLOCK BELOW BEFORE ENABLING THIS -enable_habitat = False - -# Upload when (seconds_since_utc_epoch%upload_rate) == 0. Otherwise just delay upload_rate seconds between uploads. -# Setting this to True with multple uploaders should give a higher chance of all uploaders uploading the same frame, -# however the upload_rate should not be set too low, else there may be a chance of missing upload slots. -synchronous_upload = True - -# Settings for uploading to APRS-IS -[aprs] -# APRS-IS Login Information -aprs_user = N0CALL -aprs_pass = 00000 - -# APRS-IS server to upload to. -aprs_server = rotate.aprs2.net - -# Object name to be used when uploading to APRS-IS (Max 9 chars) -# Should be either a callsign with a -11 or -12 suffix (i.e. N0CALL-12), -# or , which will be replaced with the radiosondes serial number -aprs_object_id = - -# The APRS-IS beacon comment. 
The following fields can be included: -# - Sonde Frequency, i.e. 401.520 MHz -# - Sonde Type (RS94/RS41) -# - Sonde Serial Number (i.e. M1234567) -# - Sonde Vertical Velocity (i.e. -5.1m/s) -# - Sonde reported temperature. If no temp data available, this will report -273 degC. Only works for RS41s. -aprs_custom_comment = Radiosonde Auto-RX # Settings for uploading to the Habitat HAB tracking database ( https://tracker.habhub.org/ ) @@ -120,21 +84,67 @@ aprs_custom_comment = Radiosonde Auto-RX # If you use a custom payload callsign, you will need to create an appropriate payload document for it to appear on the map # [habitat] -# Payload callsign - if set to will use the serial number of the sonde and create a payload document automatically -payload_callsign = -payload_description = Meteorological Radiosonde +habitat_enabled = False + # Uploader callsign, as shown above. PLEASE CHANGE THIS TO SOMETHING UNIQUE. -uploader_callsign = SONDE_AUTO_RX +uploader_callsign = CHANGEME_AUTO_RX + # Upload listener position to Habitat? (So you show up on the map) -upload_listener_position = False +upload_listener_position = True + +# Habitat Upload Rate - Upload a packet every X seconds. +upload_rate = 30 + +# Payload callsign - if set to will use the serial number of the sonde and create a payload document automatically +# WARNING - If running multiple RTLSDRs, setting this to a fixed callsign will result in odd behaviour on the online tracker. +# DO NOT SET THIS TO ANYTHING OTHER THAN IF YOU ARE USING MORE THAN ONE SDR! +payload_callsign = + + +# Station Location (optional). Used by the Habitat Uploader, and by Rotator Control +[location] +station_lat = 0.0 +station_lon = 0.0 +station_alt = 0.0 + + +# Settings for uploading to APRS-IS +[aprs] +aprs_enabled = False +# APRS-IS Login Information +aprs_user = N0CALL +aprs_pass = 00000 + +# APRS Upload Rate - Upload a packet every X seconds. +upload_rate = 30 + +# APRS-IS server to upload to. 
+aprs_server = rotate.aprs2.net + +# Object name to be used when uploading to APRS-IS (Max 9 chars) +# Should be either a callsign with a -11 or -12 suffix (i.e. N0CALL-12), +# or , which will be replaced with the radiosonde's serial number +# WARNING - If running multiple RTLSDRs, setting this to a fixed callsign will result in odd behaviour on the online tracker. +# DO NOT SET THIS TO ANYTHING OTHER THAN IF YOU ARE USING MORE THAN ONE SDR! +aprs_object_id = + +# The APRS-IS beacon comment. The following fields can be included: +# - Sonde Frequency, i.e. 401.520 MHz +# - Sonde Type (RS94/RS41) +# - Sonde Serial Number (i.e. M1234567) +# - Sonde Vertical Velocity (i.e. -5.1m/s) +# - Sonde reported temperature. If no temp data available, this will report -273 degC. Only works for RS41s. +aprs_custom_comment = Radiosonde http://bit.ly/2Bj4Sfk # Settings for pushing data into OziPlotter # Oziplotter receives data via a basic CSV format, via UDP. [oziplotter] +# WARNING - This should not be enabled in a multi-SDR configuration, as OziExplorer currently has no way of differentiating +# between sonde IDs. ozi_enabled = False ozi_update_rate = 5 -# Broadcast UDP port - Set this to 55681 if sending data to OziMux, or 8942 if sending directly to OziPlotter +# UDP Broadcast output port. ozi_port = 55681 # Payload summary output, which can be used by a few of the Horus Ground Station tools payload_summary_enabled = False @@ -151,19 +161,43 @@ max_altitude = 50000 # Discard positions more than 1000 km from the observation station location (if set) max_radius_km = 1000 -# MQTT (Even more interfacing options!) -# Post all sonde messages to a MQTT server -[mqtt] -mqtt_enabled = False -mqtt_hostname = 127.0.0.1 -mqtt_port = 1883 +# Advanced Settings +# These control low-level settings within various modules. +# Playing with them may result in odd behaviour. +[advanced] +# Scanner - Receive bin width (Hz) +search_step = 800 +# Scanner - Minimum SNR for a peak to be detected.
The lower the number, the more peaks detected. +snr_threshold = 10 +# Scanner - Maximum number of peaks to search through during a scan pass. +# Increase this if you have lots of spurious signals, though this will increase scan times. +max_peaks = 10 +# Scanner - Minimum distance between peaks (Hz) +min_distance = 1000 +# Scanner - Scan Dwell Time - How long to observe the specified spectrum for. +scan_dwell_time = 20 +# Scanner - Detection Dwell time - How long to wait for a sonde detection on each peak. +detect_dwell_time = 5 +# Scanner - Delay between scans. We should delay a short amount between scans to allow for decoders and other actions to jump in. +scan_delay = 10 +# Quantize search results to x Hz steps. Useful as most sondes are on 10 kHz frequency steps. +quantization = 10000 +# Upload when (seconds_since_utc_epoch%upload_rate) == 0. Otherwise just delay upload_rate seconds between uploads. +# Setting this to True with multiple uploaders should give a higher chance of all uploaders uploading the same frame, +# however the upload_rate should not be set too low, else there may be a chance of missing upload slots. +synchronous_upload = True +# Only accept a payload ID as valid once it has been seen N times. +# This helps avoid corrupted callsigns getting onto the map. +payload_id_valid = 5 + # Rotator Settings # auto_rx can communicate with an instance of rotctld, on either the local machine or elsewhere on the network. -# The update rate is tied to the upload_rate setting above, though internet upload does not need to be enabled -# for the rotator to be updated. [rotator] -enable_rotator = False +# WARNING - This should not be enabled in a multi-SDR configuration. +rotator_enabled = False +# How often to update the rotator position. +update_rate = 10 # Hostname / Port of the rotctld instance. rotator_hostname = 127.0.0.1 rotator_port = 4533