#!/usr/bin/env python
#
# Radiosonde Auto RX Tools
#
# 2017-04 Mark Jessop <vk5qi@rfhead.net>
#
# Refer github page for instructions on setup and usage.
# https://github.com/projecthorus/radiosonde_auto_rx/
#
import argparse
import datetime
import json
import logging
import os
import platform
import shutil
import signal
import subprocess
import sys
import time
import traceback
import Queue
import numpy as np
from StringIO import StringIO
from threading import Thread

from aprs_utils import *
from async_file_reader import AsynchronousFileReader
from config_reader import *
from findpeaks import *
from gps_grabber import *
from habitat_utils import *
from ozi_utils import *
from rotator_utils import *
2017-04-29 14:06:47 +00:00
2017-12-17 01:44:20 +00:00
# Logging level
# INFO = Basic status messages
# DEBUG = Adds information on each command run by subprocess.
logging_level = logging.INFO

# Set this to true to enable dumping of all the rtl_power output to files in ./log/
# Note that this can result in a LOT of log files being generated depending on your scanning settings.
uber_debug = False

# Internet Push Globals
APRS_OUTPUT_ENABLED = False
HABITAT_OUTPUT_ENABLED = False

# Flag polled by internet_push_thread; set False to shut the thread down.
INTERNET_PUSH_RUNNING = True
internet_push_queue = Queue.Queue()

# Second Queue for OziPlotter outputs, since we want this to run at a faster rate.
OZI_PUSH_RUNNING = True
ozi_push_queue = Queue.Queue()

# Flight Statistics data
# Stores copies of the telemetry dictionary returned by process_rs_line:
#   'first'  - first telemetry frame seen this flight
#   'apogee' - highest-altitude frame seen so far
#   'last'   - most recent frame
flight_stats = {
    'first': None,
    'apogee': None,
    'last': None
}
2017-04-29 14:06:47 +00:00
2017-12-17 01:44:20 +00:00
def run_rtl_power(start, stop, step, filename="log_power.csv", dwell=20, ppm=0, gain=-1, bias=False):
    """ Run rtl_power over [start, stop] Hz in <step> Hz bins, with a timeout.

    Args:
        start, stop (int): Scan frequency range, in Hz.
        step (int): Bin size, in Hz.
        filename (str): Output CSV file written by rtl_power.
        dwell (int): Integration time in seconds (rtl_power is given dwell+10s before timeout fires).
        ppm (int): Receiver frequency correction, in ppm.
        gain (float): SDR gain in dB; -1 means 'use default gain'.
        bias (bool): Enable the rtl_sdr bias tee (-T).

    Returns:
        bool: True if rtl_power exited cleanly, False otherwise.
    """
    # Example: rtl_power -T -f 400400000:403500000:800 -i20 -1 -c 20% -p 0 -g 26.0 log_power.csv

    # Add a -T option if bias is enabled
    bias_option = "-T " if bias else ""

    # Add a gain parameter if we have been provided one.
    if gain != -1:
        gain_param = '-g %.1f ' % gain
    else:
        gain_param = ''

    # Add -k 30 option, to SIGKILL rtl_power 30 seconds after the regular timeout expires.
    # Note that this only works with the GNU Coreutils version of Timeout, not the IBM version,
    # which is provided with OSX (Darwin).
    if 'Darwin' in platform.platform():
        timeout_kill = ''
    else:
        timeout_kill = '-k 30 '

    rtl_power_cmd = "timeout %s%d rtl_power %s-f %d:%d:%d -i %d -1 -c 20%% -p %d %s%s" % (
        timeout_kill, dwell + 10, bias_option, start, stop, step, dwell, int(ppm), gain_param, filename)

    logging.info("Running frequency scan.")
    logging.debug("Running command: %s" % rtl_power_cmd)

    ret_code = os.system(rtl_power_cmd)

    # os.system returns the shell's wait status; any non-zero value (not just 1)
    # indicates rtl_power failed, was not found, or was killed by the timeout.
    if ret_code != 0:
        logging.critical("rtl_power call failed!")
        return False
    else:
        return True
2017-04-29 02:00:10 +00:00
def read_rtl_power(filename):
    """ Read in frequency samples from a single-shot log file produced by rtl_power.

    Args:
        filename (str): Path to a rtl_power CSV log file.

    Returns:
        tuple: (freq, power, freq_step) where freq and power are numpy arrays of
        frequency (Hz) and power (dB, NaNs replaced with 0) samples, and
        freq_step is the bin size (Hz) of the last line read.

    Raises:
        Exception: If any line in the file has fewer than 6 fields.
    """
    # Output buffers.
    freq = np.array([])
    power = np.array([])
    freq_step = 0

    # rtl_power log files are csv's, with the first 6 fields in each line describing the time and frequency scan parameters
    # for the remaining fields, which contain the power samples.
    # Use a context manager so the file is closed even if parsing raises.
    with open(filename, 'r') as f:
        for line in f:
            # Split line into fields.
            fields = line.split(',')

            if len(fields) < 6:
                logging.error("Invalid number of samples in input file - corrupt?")
                raise Exception("Invalid number of samples in input file - corrupt?")

            # Fields 0 and 1 (start date/time) are not used.
            start_freq = float(fields[2])
            stop_freq = float(fields[3])
            freq_step = float(fields[4])

            samples = np.loadtxt(StringIO(",".join(fields[6:])), delimiter=',')
            # Reconstruct the frequency axis from the line's start/stop range.
            freq_range = np.linspace(start_freq, stop_freq, len(samples))

            # Add frequency range and samples to output buffers.
            freq = np.append(freq, freq_range)
            power = np.append(power, samples)

    # Sanitize power values, to remove the nan's that rtl_power puts in there occasionally.
    power = np.nan_to_num(power)

    return (freq, power, freq_step)
2017-04-30 04:41:09 +00:00
2017-04-29 14:06:47 +00:00
def quantize_freq(freq_list, quantize=5000):
    """ Quantise an array of frequencies (Hz) to the nearest <quantize> Hz step. """
    # Scale down to units of `quantize`, round to the nearest whole step, then scale back up.
    _steps = np.round(freq_list / quantize)
    return _steps * quantize
2017-04-29 02:00:10 +00:00
2017-12-17 01:44:20 +00:00
def detect_sonde(frequency, ppm=0, gain=-1, bias=False, dwell_time=10):
    """ Receive some FM and attempt to detect the presence of a radiosonde.

    Runs a rtl_fm | sox | rs_detect pipeline for <dwell_time> seconds and maps
    rs_detect's exit code to a sonde type string.

    Args:
        frequency (int): Frequency to listen on, in Hz.
        ppm (int): Receiver frequency correction, in ppm.
        gain (float): SDR gain in dB; -1 means 'use default gain'.
        bias (bool): Enable the rtl_sdr bias tee (-T).
        dwell_time (int): How long to listen, in seconds.

    Returns:
        str or None: Sonde type ("RS41", "RS92", "DFM", "M10", "iMet"), or None if nothing detected.
    """
    # Example command (for command-line testing):
    # rtl_fm -T -p 0 -M fm -g 26.0 -s 15k -f 401500000 | sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 | ./rs_detect -z -t 8

    # Add a -T option if bias is enabled
    bias_option = "-T " if bias else ""

    # Add a gain parameter if we have been provided one.
    if gain != -1:
        gain_param = '-g %.1f ' % gain
    else:
        gain_param = ''

    rx_test_command = "timeout %ds rtl_fm %s-p %d %s-M fm -F9 -s 15k -f %d 2>/dev/null |" % (
        dwell_time, bias_option, int(ppm), gain_param, frequency)
    rx_test_command += " sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -t wav - highpass 20 2>/dev/null |"
    rx_test_command += " ./rs_detect -z -t 8 2>/dev/null"

    logging.info("Attempting sonde detection on %.3f MHz" % (frequency / 1e6))
    logging.debug("Running command: %s" % rx_test_command)

    ret_code = os.system(rx_test_command)

    # os.system returns a wait status; the actual rs_detect exit code is in the high byte.
    ret_code = ret_code >> 8

    # rs_detect exit codes for each detected sonde type.
    _sonde_types = {
        2: ("DFM", "Detected a DFM Sonde! (Unsupported)"),
        3: ("RS41", "Detected a RS41!"),
        4: ("RS92", "Detected a RS92!"),
        5: ("M10", "Detected a M10 Sonde! (Unsupported)"),
        6: ("iMet", "Detected a iMet Sonde! (Unsupported)"),
    }

    if ret_code in _sonde_types:
        _type, _message = _sonde_types[ret_code]
        logging.info(_message)
        return _type
    else:
        return None
2017-04-29 02:00:10 +00:00
2017-07-16 10:11:13 +00:00
def reset_rtlsdr():
    """ Attempt to perform a USB Reset on all attached RTLSDRs. This uses the usb_reset binary from ../scan

    Best-effort: all errors (including a missing `lsusb` binary, which previously
    escaped the try block and crashed the caller) are caught and logged.
    """
    try:
        lsusb_output = subprocess.check_output(['lsusb'])
        devices = lsusb_output.split('\n')
        for device in devices:
            if 'RTL2838' in device:
                # Found an rtlsdr! Attempt to extract bus and device number.
                # Expecting something like: 'Bus 001 Device 005: ID 0bda:2838 Realtek Semiconductor Corp. RTL2838 DVB-T'
                device_fields = device.split(' ')
                # Attempt to cast fields to integers, to give some surety that we have the correct data.
                device_bus = int(device_fields[1])
                device_number = int(device_fields[3][:-1])
                # Construct device address
                reset_argument = '/dev/bus/usb/%03d/%03d' % (device_bus, device_number)
                # Attempt to reset the device.
                logging.info("Resetting device: %s" % reset_argument)
                ret_code = subprocess.call(['./reset_usb', reset_argument])
                logging.debug("Got return code: %s" % ret_code)
    except:
        # Deliberately broad: a failed USB reset should never abort the scan loop.
        logging.error("Errors occured while attempting to reset USB device.")
2017-04-29 02:00:10 +00:00
2017-05-06 23:42:46 +00:00
def sonde_search(config, attempts=5):
    """ Perform a frequency scan across the defined range, and test each frequency for a radiosonde's presence.

    Args:
        config (dict): Station configuration, as returned by the config reader.
            Uses the whitelist/greylist/blacklist, scan range, SDR and timing settings.
        attempts (int): Number of scan passes before giving up.

    Returns:
        tuple: (frequency_hz, sonde_type) on success, (None, None) if nothing was found.
    """
    search_attempts = attempts

    sonde_freq = None
    sonde_type = None

    while search_attempts > 0:
        if len(config['whitelist']) == 0:
            # No whitelist frequencies provided - perform a scan.
            run_rtl_power(config['min_freq'] * 1e6, config['max_freq'] * 1e6, config['search_step'],
                          ppm=config['rtlsdr_ppm'], gain=config['rtlsdr_gain'], bias=config['rtlsdr_bias'])

            # Read in result
            try:
                (freq, power, step) = read_rtl_power('log_power.csv')
                # Sanity check results.
                if step == 0 or len(freq) == 0 or len(power) == 0:
                    raise Exception("Invalid file.")

                if uber_debug:
                    # Copy log_power.csv to log directory, for later debugging.
                    shutil.copy('log_power.csv', './log/log_power_%s.csv' % datetime.datetime.utcnow().strftime('%Y-%m-%d_%H%M%S'))
            except Exception as e:
                traceback.print_exc()
                logging.error("Failed to read log_power.csv. Resetting RTLSDRs and attempting to run rtl_power again.")
                # no log_power.csv usually means that rtl_power has locked up and had to be SIGKILL'd.
                # This occurs when it can't get samples from the RTLSDR, because it's locked up for some reason.
                # Issuing a USB Reset to the rtlsdr can sometimes solve this.
                reset_rtlsdr()
                search_attempts -= 1
                time.sleep(10)
                continue

            # Rough approximation of the noise floor of the received power spectrum.
            power_nf = np.mean(power)

            # Detect peaks.
            peak_indices = detect_peaks(power, mph=(power_nf + config['min_snr']), mpd=(config['min_distance'] / step), show=False)

            # If we have found no peaks, and no greylist has been provided, re-scan.
            if (len(peak_indices) == 0) and (len(config['greylist']) == 0):
                logging.info("No peaks found on this pass.")
                search_attempts -= 1
                time.sleep(10)
                continue

            # Sort peaks by power, strongest first.
            peak_powers = power[peak_indices]
            peak_freqs = freq[peak_indices]
            peak_frequencies = peak_freqs[np.argsort(peak_powers)][::-1]

            # Quantize to nearest x kHz
            peak_frequencies = quantize_freq(peak_frequencies, config['quantization'])

            # Append on any frequencies in the supplied greylist (always checked first).
            peak_frequencies = np.append(np.array(config['greylist']) * 1e6, peak_frequencies)

            # Remove any duplicate entries after quantization, but preserve order.
            _, peak_idx = np.unique(peak_frequencies, return_index=True)
            peak_frequencies = peak_frequencies[np.sort(peak_idx)]

            # Remove any frequencies in the blacklist.
            for _frequency in np.array(config['blacklist']) * 1e6:
                _index = np.argwhere(peak_frequencies == _frequency)
                peak_frequencies = np.delete(peak_frequencies, _index)

            logging.info("Performing scan on %d frequencies (MHz): %s" % (len(peak_frequencies), str(peak_frequencies / 1e6)))
        else:
            # We have been provided a whitelist - scan through the supplied frequencies.
            peak_frequencies = np.array(config['whitelist']) * 1e6
            logging.info("Scanning on whitelist frequencies (MHz): %s" % str(peak_frequencies / 1e6))

        # Run rs_detect on each peak frequency, to determine if there is a sonde there.
        for freq in peak_frequencies:
            detected = detect_sonde(freq,
                                    ppm=config['rtlsdr_ppm'],
                                    gain=config['rtlsdr_gain'],
                                    bias=config['rtlsdr_bias'],
                                    dwell_time=config['dwell_time'])
            if detected != None:
                sonde_freq = freq
                sonde_type = detected
                break

        if sonde_type != None:
            # Found a sonde! Break out of the while loop and attempt to decode it.
            return (sonde_freq, sonde_type)
        else:
            # No sondes found :-( Wait and try again.
            search_attempts -= 1
            logging.warning("Search attempt failed, %d attempts remaining. Waiting %d seconds." % (search_attempts, config['search_delay']))
            time.sleep(config['search_delay'])

    # If we get here, we have exhausted our search attempts.
    logging.error("No sondes detected.")
    return (None, None)
2017-04-30 09:23:19 +00:00
def process_rs_line(line):
    """ Process a line of JSON output from a radiosonde decoder, converting it to a telemetry dict.

    Args:
        line (str): One line of decoder output. Only lines starting with '{' are parsed.

    Returns:
        dict or None: Telemetry dict (decoder fields plus crc/temp/humidity/
        datetime_str/short_time), or None if the line was not parseable.
    """
    try:
        # Only attempt to parse lines that look like a JSON blob; the decoders
        # also emit plain status text which we ignore.
        if line[0] != "{":
            return None

        rs_frame = json.loads(line)

        # Note: We expect the following fields available within the JSON blob:
        # id, frame, datetime, lat, lon, alt, crc
        rs_frame['crc'] = True  # the rs92ecc only reports frames that match crc so we can lie here
        rs_frame['temp'] = 0.0  # we don't have this yet
        rs_frame['humidity'] = 0.0
        rs_frame['datetime_str'] = rs_frame['datetime'].replace("Z", "")  # python datetime sucks
        rs_frame['short_time'] = rs_frame['datetime'].split(".")[0].split("T")[1]

        logging.info("TELEMETRY: %s,%d,%s,%.5f,%.5f,%.1f,%s" % (
            rs_frame['id'], rs_frame['frame'], rs_frame['datetime'],
            rs_frame['lat'], rs_frame['lon'], rs_frame['alt'], rs_frame['crc']))

        return rs_frame
    except:
        # Deliberately broad: a single malformed line should not kill the decode loop.
        logging.error("Could not parse string: %s" % line)
        traceback.print_exc()
        return None
2017-05-08 07:04:33 +00:00
def update_flight_stats(data):
    """ Maintain a record of flight statistics in the module-level flight_stats dict.

    Args:
        data (dict): Telemetry dict as returned by process_rs_line (must contain 'alt').
    """
    global flight_stats

    # Save the current frame into the 'last' frame storage
    flight_stats['last'] = data

    # Is this our first telemetry frame?
    # If so, populate all fields in the flight stats dict with the current telemetry frame.
    if flight_stats['first'] == None:
        flight_stats['first'] = data
        flight_stats['apogee'] = data

    # Is the current altitude higher than the current peak altitude?
    if data['alt'] > flight_stats['apogee']['alt']:
        flight_stats['apogee'] = data
2017-07-16 10:11:13 +00:00
2017-05-08 07:04:33 +00:00
def calculate_flight_statistics():
    """ Produce a flight summary string from the module-level flight_stats dict, for inclusion in the log file.

    Returns:
        str: Multi-line human-readable summary (acquisition, ascent/descent rates, last position).
    """
    global flight_stats

    _first = flight_stats['first']
    _apogee = flight_stats['apogee']
    _last = flight_stats['last']

    # Grab peak altitude.
    peak_altitude = _apogee['alt']
    # Grab last known descent rate
    descent_rate = _last['vel_v']

    # Calculate average ascent rate, based on data we have.
    # Wrap this in a try, in case we have time string parsing issues.
    try:
        if _first == _apogee:
            # We have only caught a flight during descent. Don't calculate ascent rate.
            ascent_rate = -1.0
        else:
            ascent_height = _apogee['alt'] - _first['alt']
            start_time = datetime.datetime.strptime(_first['datetime_str'], "%Y-%m-%dT%H:%M:%S.%f")
            apogee_time = datetime.datetime.strptime(_apogee['datetime_str'], "%Y-%m-%dT%H:%M:%S.%f")
            ascent_time = (apogee_time - start_time).seconds
            ascent_rate = ascent_height / float(ascent_time)
    except:
        ascent_rate = -1.0

    stats_str = "Acquired %s at %s on %s, at %d m altitude.\n" % (_first['type'], _first['datetime_str'], _first['freq'], int(_first['alt']))
    stats_str += "Ascent Rate: %.1f m/s, Peak Altitude: %d, Descent Rate: %.1f m/s\n" % (ascent_rate, int(peak_altitude), descent_rate)
    stats_str += "Last Position: %.5f, %.5f, %d m alt, at %s\n" % (_last['lat'], _last['lon'], int(_last['alt']), _last['datetime_str'])
    stats_str += "Flight Path: https://aprs.fi/#!call=%s&timerange=10800&tail=10800\n" % _last['id']

    return stats_str
2017-12-20 09:28:24 +00:00
def decode_rs92(frequency, ppm=0, gain=-1, bias=False, rx_queue=None, almanac=None, ephemeris=None, timeout=120, save_log=False):
    """ Decode a RS92 sonde.

    Runs a rtl_fm | sox | rs92ecc pipeline and processes its JSON output until
    no telemetry has been seen for <timeout> seconds.

    Args:
        frequency (int): Sonde frequency, in Hz.
        ppm (int): Receiver frequency correction, in ppm.
        gain (float): SDR gain in dB; -1 means 'use default gain'.
        bias (bool): Enable the rtl_sdr bias tee (-T).
        rx_queue: If not None, telemetry dicts are pushed onto the module-level
            internet and ozi queues.
        almanac (str): Path to a GPS almanac file, or None.
        ephemeris (str): Path to a GPS ephemeris file, or None (will attempt download).
        timeout (int): Decode timeout, in seconds.
        save_log (bool): If True, write telemetry to a per-sonde log file in ./log/.

    Returns:
        False if no GPS data could be obtained; otherwise returns None when decoding finishes.
    """
    global latest_sonde_data, internet_push_queue, ozi_push_queue

    # Before we get started, do we need to download GPS data?
    if ephemeris == None:
        # If no ephemeris data defined, attempt to download it.
        # get_ephemeris will either return the saved file name, or None.
        ephemeris = get_ephemeris(destination="ephemeris.dat")

    # If ephemeris is still None, then we failed to download the ephemeris data.
    # Try and grab the almanac data instead
    if ephemeris == None:
        logging.error("Could not obtain ephemeris data, trying to download an almanac.")
        almanac = get_almanac(destination="almanac.txt")
        if almanac == None:
            # We probably don't have an internet connection. Bomb out, since we can't do much with the sonde telemetry without an almanac!
            logging.critical("Could not obtain GPS ephemeris or almanac data.")
            return False

    # Add a -T option if bias is enabled
    bias_option = "-T " if bias else ""

    # Add a gain parameter if we have been provided one.
    if gain != -1:
        gain_param = '-g %.1f ' % gain
    else:
        gain_param = ''

    # Example command:
    # rtl_fm -p 0 -g 26.0 -M fm -F9 -s 12k -f 400500000 | sox -t raw -r 12k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - highpass 20 lowpass 2500 2>/dev/null | ./rs92ecc
    decode_cmd = "rtl_fm %s-p %d %s-M fm -F9 -s 12k -f %d 2>/dev/null |" % (bias_option, int(ppm), gain_param, frequency)
    decode_cmd += " sox -t raw -r 12k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - lowpass 2500 highpass 20 2>/dev/null |"

    # Note: I've got the check-CRC option hardcoded in here as always on.
    # I figure this is prudent if we're going to proceed to push this telemetry data onto a map.
    if ephemeris != None:
        decode_cmd += "./rs92ecc -vx -v --crc --ecc --vel -e %s" % ephemeris
    elif almanac != None:
        decode_cmd += "./rs92ecc -vx -v --crc --ecc --vel -a %s" % almanac

    logging.debug("Running command: %s" % decode_cmd)

    rx_last_line = time.time()

    # Receiver subprocess. Discard stderr, and feed stdout into an asynchronous read class.
    rx = subprocess.Popen(decode_cmd, shell=True, stdin=None, stdout=subprocess.PIPE, preexec_fn=os.setsid)
    rx_stdout = AsynchronousFileReader(rx.stdout, autostart=True)

    _log_file = None

    while not rx_stdout.eof():
        for line in rx_stdout.readlines():
            if (line != None) and (line != ""):
                try:
                    data = process_rs_line(line)
                    # Reset timeout counter.
                    rx_last_line = time.time()

                    if data != None:
                        # Add in a few fields that don't come from the sonde telemetry.
                        data['freq'] = "%.3f MHz" % (frequency / 1e6)
                        data['type'] = "RS92"

                        # If we are seeing any aux data (i.e. there is something strapped to this RS92), append '-Ozone' to the type.
                        if 'aux' in data.keys():
                            _ozone = "-Ozone"
                        else:
                            _ozone = ""

                        # Per-Sonde Logging
                        if save_log:
                            if _log_file is None:
                                _log_file_name = "./log/%s_%s_%s_%d.log" % (
                                    datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"),
                                    data['id'],
                                    (data['type'] + _ozone),
                                    int(frequency / 1e3))
                                _log_file = open(_log_file_name, 'wb')

                            # Write a log line
                            # datetime,id,frame_no,lat,lon,alt,type,frequency
                            _log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%s,%.3f\n" % (
                                data['datetime_str'],
                                data['id'],
                                data['frame'],
                                data['lat'],
                                data['lon'],
                                data['alt'],
                                (data['type'] + _ozone),
                                frequency / 1e6)
                            _log_file.write(_log_line)
                            _log_file.flush()

                        update_flight_stats(data)

                        if rx_queue != None:
                            try:
                                internet_push_queue.put_nowait(data)
                                ozi_push_queue.put_nowait(data)
                            except:
                                pass
                except:
                    traceback.print_exc()
                    logging.error("Error parsing line: %s" % line)

        # Check timeout counter.
        if time.time() > (rx_last_line + timeout):
            logging.error("RX Timed out.")
            break
        # Sleep for a short time.
        time.sleep(0.1)

    # If we were writing a log, close the file.
    if _log_file != None:
        _log_file.flush()
        _log_file.close()

    logging.error("Closing RX Thread.")
    # The decode pipeline was started with its own session id, so kill the whole process group.
    os.killpg(os.getpgid(rx.pid), signal.SIGTERM)
    rx_stdout.stop()
    rx_stdout.join()
    return
2017-04-29 14:06:47 +00:00
2017-12-20 09:28:24 +00:00
def decode_rs41(frequency, ppm=0, gain=-1, bias=False, rx_queue=None, timeout=120, save_log=False):
    """ Decode a RS41 sonde.

    Runs a rtl_fm | sox | rs41ecc pipeline and processes its JSON output until
    no telemetry has been seen for <timeout> seconds.

    Args:
        frequency (int): Sonde frequency, in Hz.
        ppm (int): Receiver frequency correction, in ppm.
        gain (float): SDR gain in dB; -1 means 'use default gain'.
        bias (bool): Enable the rtl_sdr bias tee (-T).
        rx_queue: If not None, telemetry dicts are pushed onto the module-level
            internet and ozi queues.
        timeout (int): Decode timeout, in seconds.
        save_log (bool): If True, write telemetry to a per-sonde log file in ./log/.
    """
    global latest_sonde_data, internet_push_queue, ozi_push_queue

    # Add a -T option if bias is enabled
    bias_option = "-T " if bias else ""

    # Add a gain parameter if we have been provided one.
    if gain != -1:
        gain_param = '-g %.1f ' % gain
    else:
        gain_param = ''

    # Note: Have removed a 'highpass 20' filter from the sox line, will need to re-evaluate if adding that is useful in the future.
    decode_cmd = "rtl_fm %s-p %d %s-M fm -F9 -s 15k -f %d 2>/dev/null |" % (bias_option, int(ppm), gain_param, frequency)
    decode_cmd += " sox -t raw -r 15k -e s -b 16 -c 1 - -r 48000 -b 8 -t wav - lowpass 2600 2>/dev/null |"

    # Note: I've got the check-CRC option hardcoded in here as always on.
    # I figure this is prudent if we're going to proceed to push this telemetry data onto a map.
    decode_cmd += "./rs41ecc --crc --ecc "  # if this doesn't work try -i at the end

    logging.debug("Running command: %s" % decode_cmd)

    rx_last_line = time.time()

    # Receiver subprocess. Discard stderr, and feed stdout into an asynchronous read class.
    rx = subprocess.Popen(decode_cmd, shell=True, stdin=None, stdout=subprocess.PIPE, preexec_fn=os.setsid)
    rx_stdout = AsynchronousFileReader(rx.stdout, autostart=True)

    _log_file = None

    while not rx_stdout.eof():
        for line in rx_stdout.readlines():
            if (line != None) and (line != ""):
                try:
                    data = process_rs_line(line)
                    # Reset timeout counter.
                    rx_last_line = time.time()

                    if data != None:
                        # Add in a few fields that don't come from the sonde telemetry.
                        data['freq'] = "%.3f MHz" % (frequency / 1e6)
                        data['type'] = "RS41"

                        # Per-Sonde Logging
                        if save_log:
                            if _log_file is None:
                                _log_file_name = "./log/%s_%s_%s_%d.log" % (
                                    datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S"),
                                    data['id'],
                                    data['type'],
                                    int(frequency / 1e3))
                                _log_file = open(_log_file_name, 'wb')

                            # Write a log line
                            # datetime,id,frame_no,lat,lon,alt,type,frequency
                            _log_line = "%s,%s,%d,%.5f,%.5f,%.1f,%s,%.3f\n" % (
                                data['datetime_str'],
                                data['id'],
                                data['frame'],
                                data['lat'],
                                data['lon'],
                                data['alt'],
                                data['type'],
                                frequency / 1e6)
                            _log_file.write(_log_line)
                            _log_file.flush()

                        update_flight_stats(data)

                        latest_sonde_data = data

                        if rx_queue != None:
                            try:
                                internet_push_queue.put_nowait(data)
                                ozi_push_queue.put_nowait(data)
                            except:
                                pass
                except:
                    traceback.print_exc()
                    logging.error("Error parsing line: %s" % line)

        # Check timeout counter.
        if time.time() > (rx_last_line + timeout):
            logging.error("RX Timed out.")
            break
        # Sleep for a short time.
        time.sleep(0.1)

    # If we were writing a log, close the file.
    if _log_file != None:
        _log_file.flush()
        _log_file.close()

    logging.error("Closing RX Thread.")
    # The decode pipeline was started with its own session id, so kill the whole process group.
    os.killpg(os.getpgid(rx.pid), signal.SIGTERM)
    rx_stdout.stop()
    rx_stdout.join()
    return
2017-04-30 09:23:19 +00:00
2017-05-05 12:56:41 +00:00
def internet_push_thread(station_config):
    """ Push a frame of sonde data into various internet services (APRS-IS, Habitat), and also to a rotator (if configured).

    Runs until the module-level INTERNET_PUSH_RUNNING flag is cleared. All
    configuration is read from the station_config argument (previously parts of
    this function read the module-level `config` global instead - fixed for
    consistency, since the thread is started with the same config dict).

    Args:
        station_config (dict): Station configuration, as returned by the config reader.
    """
    global internet_push_queue, INTERNET_PUSH_RUNNING
    logging.info("Started Internet Push thread.")
    while INTERNET_PUSH_RUNNING:
        data = None
        try:
            # Wait until there is somethign in the queue before trying to process.
            if internet_push_queue.empty():
                time.sleep(1)
                continue
            else:
                # Read in entire contents of queue, and keep the most recent entry.
                while not internet_push_queue.empty():
                    data = internet_push_queue.get()
        except:
            traceback.print_exc()
            continue

        try:
            # Wrap this entire section in a try/except, to catch any data parsing errors.
            # APRS Upload
            if station_config['enable_aprs']:
                # Produce aprs comment, based on user config.
                aprs_comment = station_config['aprs_custom_comment']
                aprs_comment = aprs_comment.replace("<freq>", data['freq'])
                aprs_comment = aprs_comment.replace("<id>", data['id'])
                aprs_comment = aprs_comment.replace("<vel_v>", "%.1fm/s" % data['vel_v'])

                # Add 'Ozone' to the sonde type field if we are seeing aux data.
                _sonde_type = data['type']
                if 'aux' in data.keys():
                    _sonde_type += "-Ozone"
                aprs_comment = aprs_comment.replace("<type>", _sonde_type)

                # Push data to APRS.
                aprs_data = push_balloon_to_aprs(data,
                                                 object_name=station_config['aprs_object_id'],
                                                 aprs_comment=aprs_comment,
                                                 aprsUser=station_config['aprs_user'],
                                                 aprsPass=station_config['aprs_pass'])
                logging.info("Data pushed to APRS-IS: %s" % aprs_data)

            # Habitat Upload
            if station_config['enable_habitat']:
                # We make the habitat comment field fixed, as we only need to add the payload type/serial/frequency.
                # If we are seeing aux data, it likely means we have an Ozone sonde!
                if 'aux' in data.keys():
                    _ozone = "-Ozone"
                else:
                    _ozone = ""

                # Create comment field.
                habitat_comment = "%s%s %s %s" % (data['type'], _ozone, data['id'], data['freq'])

                habitat_upload_payload_telemetry(data,
                                                 payload_callsign=station_config['payload_callsign'],
                                                 callsign=station_config['uploader_callsign'],
                                                 comment=habitat_comment)
                logging.debug("Data pushed to Habitat.")

            # Update Rotator positon, if configured.
            if station_config['enable_rotator'] and (station_config['station_lat'] != 0.0) and (station_config['station_lon'] != 0.0):
                # Calculate Azimuth & Elevation to Radiosonde.
                rel_position = position_info((station_config['station_lat'], station_config['station_lon'], station_config['station_alt']),
                                             (data['lat'], data['lon'], data['alt']))

                # Update the rotator with the current sonde position.
                update_rotctld(hostname=station_config['rotator_hostname'],
                               port=station_config['rotator_port'],
                               azimuth=rel_position['bearing'],
                               elevation=rel_position['elevation'])
        except:
            logging.error("Error while uploading data: %s" % traceback.format_exc())

        if station_config['synchronous_upload']:
            # Sleep for a second to ensure we don't double upload in the same slot (shouldn't' happen, but anyway...)
            time.sleep(1)

            # Wait until the next valid uplink timeslot.
            # This is determined by waiting until the time since epoch modulus the upload rate is equal to zero.
            # Note that this will result in some odd upload times, due to leap seconds and otherwise, but should
            # result in multiple stations (assuming local timezones are the same, and the stations are synced to NTP)
            # uploading at roughly the same time.
            while int(time.time()) % station_config['upload_rate'] != 0:
                time.sleep(0.1)
        else:
            # Otherwise, just sleep.
            time.sleep(station_config['upload_rate'])

    logging.debug("Closing internet push thread.")
2017-07-16 10:11:13 +00:00
def ozi_push_thread(station_config):
    """ Push the latest frame of sonde telemetry out via UDP, to OziPlotter
    and/or 'payload summary' listeners, as configured in station_config.

    Runs until the global OZI_PUSH_RUNNING flag is cleared.
    Reads frames from the global ozi_push_queue; if multiple frames have
    queued up since the last update, only the most recent one is sent.
    """
    global ozi_push_queue, OZI_PUSH_RUNNING

    logging.info("Started OziPlotter Push thread.")

    while OZI_PUSH_RUNNING:
        data = None
        try:
            # Wait until there is something in the queue before trying to process.
            if ozi_push_queue.empty():
                time.sleep(1)
                continue
            else:
                # Drain the entire queue, keeping only the most recent entry.
                while not ozi_push_queue.empty():
                    data = ozi_push_queue.get()
        except Exception:
            # Don't let a transient queue error kill the thread.
            # (Was a bare except, which also caught SystemExit/KeyboardInterrupt.)
            traceback.print_exc()
            continue

        try:
            if station_config['ozi_enabled']:
                push_telemetry_to_ozi(data, hostname=station_config['ozi_hostname'], udp_port=station_config['ozi_port'])

            if station_config['payload_summary_enabled']:
                push_payload_summary(data, udp_port=station_config['payload_summary_port'])
        except Exception:
            # A failed UDP push shouldn't stop future updates.
            traceback.print_exc()

        time.sleep(station_config['ozi_update_rate'])

    logging.debug("Closing OziPlotter Push thread.")
2017-04-29 02:00:10 +00:00
if __name__ == "__main__":

    # Ensure the log directory exists before logging.basicConfig tries to
    # open a file within it — otherwise a fresh checkout crashes on startup.
    if not os.path.exists('log'):
        os.makedirs('log')

    # Setup logging to a timestamped file in ./log/.
    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', filename=datetime.datetime.utcnow().strftime("log/%Y%m%d-%H%M%S.log"), level=logging_level)

    # Mirror all log output to stdout as well.
    stdout_format = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(stdout_format)
    logging.getLogger().addHandler(stdout_handler)

    # Command line arguments.
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", default="station.cfg", help="Receive Station Configuration File")
    parser.add_argument("-f", "--frequency", type=float, default=0.0, help="Sonde Frequency (MHz) (bypass scan step, and quit if no sonde found).")
    parser.add_argument("-t", "--timeout", type=int, default=180, help="Stop receiving after X minutes. Set to 0 to run continuously with no timeout.")
    parser.add_argument("-e", "--ephemeris", type=str, default="None", help="Use a manually obtained ephemeris file.")
    args = parser.parse_args()

    # If we haven't been given an ephemeris file, set the ephemeris variable
    # to None, so that the decoder downloads one itself.
    ephemeris = args.ephemeris
    if ephemeris == "None":
        ephemeris = None
    else:
        logging.info("Using provided ephemeris file: %s" % ephemeris)

    # Attempt to read in configuration file. Use default config if reading fails.
    config = read_auto_rx_config(args.config)
    logging.debug("Using Configuration: %s" % str(config))

    # Set the timeout. With --timeout 0 the main loop below ignores this
    # and runs continuously.
    timeout_time = time.time() + int(args.timeout) * 60

    # Internet/Ozi push thread objects (started lazily on first decode).
    push_thread_1 = None
    push_thread_2 = None

    # Sonde frequency (Hz) & type variables. None = no sonde detected yet.
    sonde_freq = None
    sonde_type = None

    try:
        # If Habitat upload is enabled and we have been provided with listener
        # coords, push our position to Habitat.
        if config['enable_habitat'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['upload_listener_position']:
            uploadListenerPosition(config['uploader_callsign'], config['station_lat'], config['station_lon'])

        # Main scan & track loop. We keep on doing this until we timeout
        # (i.e. after we expect the sonde to have landed).
        while time.time() < timeout_time or args.timeout == 0:

            # Attempt to detect a sonde on a supplied frequency, bypassing the scan.
            if args.frequency != 0.0:
                sonde_type = detect_sonde(int(float(args.frequency) * 1e6), ppm=config['rtlsdr_ppm'], gain=config['rtlsdr_gain'], bias=config['rtlsdr_bias'])
                if sonde_type != None:
                    sonde_freq = int(float(args.frequency) * 1e6)
                else:
                    logging.info("No sonde found. Exiting.")
                    sys.exit(1)

            # If we have a rotator configured, attempt to point the rotator
            # back to the home position while scanning.
            if config['enable_rotator'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['rotator_homing_enabled']:
                update_rotctld(hostname=config['rotator_hostname'],
                    port=config['rotator_port'],
                    azimuth=config['rotator_home_azimuth'],
                    elevation=config['rotator_home_elevation'])

            # If nothing is detected, or we haven't been supplied a frequency,
            # perform a scan.
            if sonde_type == None:
                (sonde_freq, sonde_type) = sonde_search(config, config['search_attempts'])

            # If we *still* haven't detected a sonde... just keep on trying,
            # until we hit our timeout.
            if sonde_type == None:
                continue

            logging.info("Starting decoding of %s on %.3f MHz" % (sonde_type, sonde_freq / 1e6))

            # Re-push our listener position to habitat, as if we have been
            # running continuously we may have dropped off the map.
            if config['enable_habitat'] and (config['station_lat'] != 0.0) and (config['station_lon'] != 0.0) and config['upload_listener_position']:
                uploadListenerPosition(config['uploader_callsign'], config['station_lat'], config['station_lon'])

            # Start both of our internet/ozi push threads, even if we're not
            # going to use them.
            if push_thread_1 == None:
                push_thread_1 = Thread(target=internet_push_thread, kwargs={'station_config': config})
                push_thread_1.start()

            if push_thread_2 == None:
                push_thread_2 = Thread(target=ozi_push_thread, kwargs={'station_config': config})
                push_thread_2.start()

            # Start decoding the sonde! These calls block until the decoder
            # times out (no packets received for rx_timeout seconds).
            if sonde_type == "RS92":
                decode_rs92(sonde_freq,
                    ppm=config['rtlsdr_ppm'],
                    gain=config['rtlsdr_gain'],
                    bias=config['rtlsdr_bias'],
                    rx_queue=internet_push_queue,
                    timeout=config['rx_timeout'],
                    save_log=config['per_sonde_log'],
                    ephemeris=ephemeris)
            elif sonde_type == "RS41":
                decode_rs41(sonde_freq,
                    ppm=config['rtlsdr_ppm'],
                    gain=config['rtlsdr_gain'],
                    bias=config['rtlsdr_bias'],
                    rx_queue=internet_push_queue,
                    timeout=config['rx_timeout'],
                    save_log=config['per_sonde_log'])
            else:
                pass

            # Receiver has timed out. Reset sonde type and frequency
            # variables and loop back to scanning.
            logging.error("Receiver timed out. Re-starting scan.")
            time.sleep(config['search_delay'])
            sonde_type = None
            sonde_freq = None

    except KeyboardInterrupt:
        logging.info("Caught CTRL-C, exiting.")
        # Shut down the Internet Push Threads.
        INTERNET_PUSH_RUNNING = False
        OZI_PUSH_RUNNING = False
        # Kill all rtl_power/rtl_fm processes.
        os.system('killall rtl_power')
        os.system('killall rtl_fm')
        sys.exit(0)

    # Note that if we are running as a service, we won't ever get here.
    logging.info("Exceeded maximum receive time. Exiting.")

    # Write flight statistics to file.
    if flight_stats['last'] != None:
        stats_str = calculate_flight_statistics()
        logging.info(stats_str)

        # Use a context manager so the file handle is closed even if the
        # write fails (was a bare open/write/close sequence).
        with open("last_positions.txt", 'a') as f:
            f.write(stats_str + "\n")

    # Stop the Internet push thread.
    INTERNET_PUSH_RUNNING = False
2017-04-29 02:00:10 +00:00