Compare commits

...

20 Commits

Author SHA1 Message Date
Mark Jessop da27f9929a
Merge pull request #863 from darksidelemm/testing
Remove rm log_power line from autorx.sh
2024-02-01 17:03:08 +10:30
Mark Jessop ee54d3049e Remove rm log_power line from autorx.sh 2024-02-01 17:02:22 +10:30
Mark Jessop a81094884f
Merge pull request #862 from argilo/log-timeout
Log the shutdown timeout if set
2024-02-01 10:02:12 +10:30
Clayton Smith 2323ac5ef0 Log the shutdown timeout if set 2024-01-31 09:40:14 -05:00
Mark Jessop dc367cafcc
Merge pull request #861 from darksidelemm/testing
Add sample count to fsk demod stats output, bump testing beta count
2024-01-31 16:54:46 +10:30
Mark Jessop 6cf49620fd Add sample count to fsk demod stats output, bump testing beta count 2024-01-31 16:53:34 +10:30
Mark Jessop 8e20b72522
Merge pull request #860 from argilo/remove-habitat
Remove unused Habitat code
2024-01-31 16:32:25 +10:30
Mark Jessop 468712ec02
Merge pull request #859 from argilo/web-live-kml-refactor
Generate live KML using ElementTree
2024-01-31 16:01:30 +10:30
Clayton Smith 765947c550 Remove unused Habitat code 2024-01-31 00:26:21 -05:00
Clayton Smith 5e2c50ce20 Show recent sondes at the top of the list 2024-01-30 22:19:50 -05:00
Clayton Smith 58a183dffa Generate live KML using ElementTree 2024-01-30 20:12:56 -05:00
Mark Jessop 0ea4558aa2
Merge pull request #858 from argilo/web-generate-kml
Generate KML files from the "Historical" page
2024-01-30 14:18:08 +10:30
Clayton Smith 209a78ab90 Replace print statements with logging 2024-01-29 22:30:02 -05:00
Clayton Smith e227eea6b7 Move KML generation into autorx.log_files 2024-01-29 21:17:25 -05:00
Clayton Smith 65b1da7476 Use autorx.log_files to read logs 2024-01-29 20:51:52 -05:00
Clayton Smith 241376236b Remove unused code 2024-01-29 19:24:17 -05:00
Clayton Smith 6446070401 Add "Generate KML" button to the "Historical" page 2024-01-29 17:10:11 -05:00
Clayton Smith 7d739ab7bb Replace fastkml & shapely with ElementTree 2024-01-29 17:05:04 -05:00
Clayton Smith 54ba77ed2b Remove unused imports 2024-01-28 13:06:42 -05:00
Clayton Smith 52135dbe8d Fix code style 2024-01-28 13:04:58 -05:00
14 changed files with 337 additions and 1294 deletions

View file

@@ -38,7 +38,6 @@ from autorx.scan import SondeScanner
from autorx.decode import SondeDecoder, VALID_SONDE_TYPES, DRIFTY_SONDE_TYPES
from autorx.logger import TelemetryLogger
from autorx.email_notification import EmailNotification
from autorx.habitat import HabitatUploader
from autorx.aprs import APRSUploader
from autorx.ozimux import OziUploader
from autorx.sondehub import SondehubUploader
@@ -766,9 +765,6 @@ def main():
)
args = parser.parse_args()
# Copy out timeout value, and convert to seconds,
_timeout = args.timeout * 60
# Copy out RS92 ephemeris value, if provided.
if args.ephemeris != "None":
rs92_ephemeris = args.ephemeris
@@ -827,6 +823,11 @@ def main():
logging.getLogger("engineio").setLevel(logging.ERROR)
logging.getLogger("geventwebsocket").setLevel(logging.ERROR)
# Copy out timeout value, and convert to seconds.
if args.timeout > 0:
logging.info(f"Will shut down automatically after {args.timeout} minutes.")
_timeout = args.timeout * 60
# Check all the RS utilities exist.
logging.debug("Checking if required binaries exist")
if not check_rs_utils(config):
@@ -933,30 +934,6 @@ def main():
exporter_objects.append(_email_notification)
exporter_functions.append(_email_notification.add)
# Habitat Uploader - DEPRECATED - Sondehub DB now in use (>1.5.0)
# if config["habitat_enabled"]:
# if config["habitat_upload_listener_position"] is False:
# _habitat_station_position = None
# else:
# _habitat_station_position = (
# config["station_lat"],
# config["station_lon"],
# config["station_alt"],
# )
# _habitat = HabitatUploader(
# user_callsign=config["habitat_uploader_callsign"],
# user_antenna=config["habitat_uploader_antenna"],
# station_position=_habitat_station_position,
# synchronous_upload_time=config["habitat_upload_rate"],
# callsign_validity_threshold=config["payload_id_valid"],
# url=config["habitat_url"],
# )
# exporter_objects.append(_habitat)
# exporter_functions.append(_habitat.add)
# APRS Uploader
if config["aprs_enabled"]:

View file

@@ -14,7 +14,4 @@
# change into appropriate directory
cd $(dirname $0)
# Clean up old files
rm log_power*.csv
python3 auto_rx.py -t 180

View file

@@ -12,7 +12,7 @@ from queue import Queue
# MINOR - New sonde type support, other fairly big changes that may result in telemetry or config file incompatibility issues.
# PATCH - Small changes, or minor feature additions.
__version__ = "1.7.3-beta6"
__version__ = "1.7.3-beta7"
# Global Variables

View file

@@ -27,11 +27,10 @@ global_config = {
# Web interface credentials
web_password = "none"
# Fixed minimum update rates for APRS & Habitat
# These are set to avoid congestion on the APRS-IS network, and on the Habitat server
# Please respect other users of these networks and leave these settings as they are.
# Fixed minimum update rate for APRS
# This is set to avoid congestion on the APRS-IS network
# Please respect other users of the network and leave this setting as it is.
MINIMUM_APRS_UPDATE_RATE = 30
MINIMUM_HABITAT_UPDATE_RATE = 30
def read_auto_rx_config(filename, no_sdr_test=False):
@@ -98,12 +97,9 @@ def read_auto_rx_config(filename, no_sdr_test=False):
"radius_temporary_block": False,
# "sonde_time_threshold": 3, # Commented out to ensure warning message is shown.
# Habitat Settings
"habitat_enabled": False,
"habitat_upload_rate": 30,
"habitat_uploader_callsign": "SONDE_AUTO_RX",
"habitat_uploader_antenna": "1/4-wave",
"habitat_upload_listener_position": False,
"habitat_payload_callsign": "<id>",
# APRS Settings
"aprs_enabled": False,
"aprs_upload_rate": 30,
@@ -166,12 +162,6 @@
"save_system_log": False,
"enable_debug_logging": False,
"save_cal_data": False,
# URL for the Habitat DB Server.
# As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
# from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
# For now, sondehub.org just acts as a proxy to habhub.org.
# This setting is not exposed to users as it's only used for unit/int testing
"habitat_url": "https://habitat.sondehub.org/",
# New Sondehub DB Settings
"sondehub_enabled": True,
"sondehub_upload_rate": 30,
@@ -298,12 +288,6 @@ def read_auto_rx_config(filename, no_sdr_test=False):
auto_rx_config["max_altitude"] = config.getint("filtering", "max_altitude")
auto_rx_config["max_radius_km"] = config.getint("filtering", "max_radius_km")
# Habitat Settings
# Deprecated from v1.5.0
# auto_rx_config["habitat_enabled"] = config.getboolean(
# "habitat", "habitat_enabled"
# )
# auto_rx_config["habitat_upload_rate"] = config.getint("habitat", "upload_rate")
auto_rx_config["habitat_uploader_callsign"] = config.get(
"habitat", "uploader_callsign"
)
@@ -314,19 +298,6 @@
"habitat", "uploader_antenna"
).strip()
# try: # Use the default configuration if not found
# auto_rx_config["habitat_url"] = config.get("habitat", "url")
# except:
# pass
# Deprecated from v1.5.0
# if auto_rx_config["habitat_upload_rate"] < MINIMUM_HABITAT_UPDATE_RATE:
# logging.warning(
# "Config - Habitat Update Rate clipped to minimum of %d seconds. Please be respectful of other users of Habitat."
# % MINIMUM_HABITAT_UPDATE_RATE
# )
# auto_rx_config["habitat_upload_rate"] = MINIMUM_HABITAT_UPDATE_RATE
# APRS Settings
auto_rx_config["aprs_enabled"] = config.getboolean("aprs", "aprs_enabled")
auto_rx_config["aprs_upload_rate"] = config.getint("aprs", "upload_rate")
@@ -564,7 +535,7 @@ def read_auto_rx_config(filename, no_sdr_test=False):
logging.warning(
"Config - Did not find kml_refresh_rate setting, using default (10 seconds)."
)
auto_rx_config["kml_refresh_rate"] = 11
auto_rx_config["kml_refresh_rate"] = 10
# New Sondehub db Settings
try:

View file

@@ -1871,7 +1871,6 @@ class SondeDecoder(object):
if __name__ == "__main__":
# Test script.
from .logger import TelemetryLogger
from .habitat import HabitatUploader
logging.basicConfig(
format="%(asctime)s %(levelname)s:%(message)s", level=logging.DEBUG
@@ -1883,7 +1882,6 @@ if __name__ == "__main__":
urllib3_log.setLevel(logging.CRITICAL)
_log = TelemetryLogger(log_directory="./testlog/")
_habitat = HabitatUploader(user_callsign="VK5QI_AUTO_RX_DEV", inhibit=False)
try:
_decoder = SondeDecoder(
@@ -1891,14 +1889,14 @@ if __name__ == "__main__":
sonde_type="RS41",
timeout=50,
rtl_device_idx="00000002",
exporter=[_habitat.add, _log.add],
exporter=[_log.add],
)
# _decoder2 = SondeDecoder(sonde_freq = 405.5*1e6,
# sonde_type = "RS41",
# timeout = 50,
# rtl_device_idx="00000001",
# exporter=[_habitat.add, _log.add])
# exporter=[_log.add])
while True:
time.sleep(5)
@@ -1911,5 +1909,4 @@ if __name__ == "__main__":
traceback.print_exc()
pass
_habitat.close()
_log.close()

View file

@@ -1,875 +0,0 @@
#!/usr/bin/env python
#
# radiosonde_auto_rx - Habitat Exporter
#
# Copyright (C) 2018 Mark Jessop <vk5qi@rfhead.net>
# Released under GNU GPL v3 or later
#
import crcmod
import datetime
import logging
import random
import requests
import time
import traceback
import json
from base64 import b64encode
from hashlib import sha256
from queue import Queue
from threading import Thread, Lock
from . import __version__ as auto_rx_version
# These get replaced out after init
url_habitat_uuids = ""
url_habitat_db = ""
habitat_url = ""
# CRC16 function
def crc16_ccitt(data):
"""
Calculate the CRC16 CCITT checksum of *data*.
(CRC16 CCITT: start 0xFFFF, poly 0x1021)
Args:
data (str): String to be CRC'd. The string will be encoded to ASCII prior to CRCing.
Return:
str: Resultant checksum as two bytes of hexadecimal.
"""
crc16 = crcmod.predefined.mkCrcFun("crc-ccitt-false")
# Encode to ASCII.
_data_ascii = data.encode("ascii")
return hex(crc16(_data_ascii))[2:].upper().zfill(4)
def sonde_telemetry_to_sentence(telemetry, payload_callsign=None, comment=None):
""" Convert a telemetry data dictionary into a UKHAS-compliant telemetry sentence.
Args:
telemetry (dict): A sonde telemetry dictionary. Refer to the description in the autorx.decode.SondeDecoder docs.
payload_callsign (str): If supplied, override the callsign field with this string.
comment (str): Optional data to add to the comment field of the output sentence.
Returns:
str: UKHAS-compliant telemetry sentence for uploading to Habitat
"""
# We only want HH:MM:SS for uploading to habitat.
_short_time = telemetry["datetime_dt"].strftime("%H:%M:%S")
if payload_callsign is None:
# If we haven't been supplied a callsign, we generate one based on the serial number.
_callsign = "RS_" + telemetry["id"]
else:
_callsign = payload_callsign
_sentence = "$$%s,%d,%s,%.5f,%.5f,%d,%.1f,%.1f,%.1f" % (
_callsign,
telemetry["frame"],
_short_time,
telemetry["lat"],
telemetry["lon"],
int(telemetry["alt"]), # Round to the nearest metre.
telemetry["vel_h"],
telemetry["temp"],
telemetry["humidity"],
)
if "f_centre" in telemetry:
# We have an estimate of the sonde's centre frequency from the modem, use this in place of
# the RX frequency.
# Round to 1 kHz
_freq = round(telemetry["f_centre"] / 1000.0)
# Convert to MHz.
_freq = "%.3f MHz" % (_freq / 1e3)
else:
# Otherwise, use the normal frequency.
_freq = telemetry["freq"]
# Add in a comment field, containing the sonde type, serial number, and frequency.
_sentence += ",%s %s %s" % (telemetry["type"], telemetry["id"], _freq)
# Add in pressure data, if valid (not -1)
if telemetry["pressure"] > 0.0:
_sentence += " %.1fhPa" % telemetry["pressure"]
# Check for Burst/Kill timer data, and add in.
if "bt" in telemetry:
if (telemetry["bt"] != -1) and (telemetry["bt"] != 65535):
_sentence += " BT %s" % time.strftime(
"%H:%M:%S", time.gmtime(telemetry["bt"])
)
# Add in battery voltage, if the field is valid (e.g. not -1)
if telemetry["batt"] > 0.0:
_sentence += " %.1fV" % telemetry["batt"]
# Add on any custom comment data if provided.
if comment != None:
comment = comment.replace(",", "_")
_sentence += " " + comment
_checksum = crc16_ccitt(_sentence[2:])
_output = _sentence + "*" + _checksum + "\n"
return _output
#
# Functions for uploading a listener position to Habitat.
# Derived from https://raw.githubusercontent.com/rossengeorgiev/hab-tools/master/spot2habitat_chase.py
#
callsign_init = False
uuids = []
def check_callsign(callsign, timeout=10):
"""
Check if a payload document exists for a given callsign.
This is done in a bit of a hack-ish way at the moment. We just check to see if there have
been any reported packets for the payload callsign on the tracker.
This should really be replaced with the correct call into the habitat tracker.
Args:
callsign (str): Payload callsign to search for.
timeout (int): Timeout for the search, in seconds. Defaults to 10 seconds.
Returns:
bool: True if the callsign has been observed within the last 6 hours, False otherwise.
"""
_url_check_callsign = "http://legacy-snus.habhub.org/tracker/datanew.php?mode=6hours&type=positions&format=json&max_positions=10&position_id=0&vehicle=%s"
logging.debug("Habitat - Checking if %s has been observed recently..." % callsign)
# Perform the request
_r = requests.get(_url_check_callsign % callsign, timeout=timeout)
try:
# Read the response in as JSON
_r_json = _r.json()
# Read out the list of positions for the requested callsign
_positions = _r_json["positions"]["position"]
# If there is at least one position returned, we assume there is a valid payload document.
if len(_positions) > 0:
logging.info(
"Habitat - Callsign %s already present in Habitat DB, not creating new payload doc."
% callsign
)
return True
else:
# Otherwise, we don't, and go create one.
return False
except Exception as e:
# Handle errors with JSON parsing.
logging.error(
"Habitat - Unable to request payload positions from legacy-snus.habhub.org - %s"
% str(e)
)
return False
# Keep an internal cache for which payload docs we've created so we don't spam couchdb with updates
payload_config_cache = {}
def ISOStringNow():
return "%sZ" % datetime.datetime.utcnow().isoformat()
def initPayloadDoc(
serial, description="Meteorology Radiosonde", frequency=401.5, timeout=20
):
"""Creates a payload in Habitat for the radiosonde before uploading"""
global url_habitat_db
payload_data = {
"type": "payload_configuration",
"name": serial,
"time_created": ISOStringNow(),
"metadata": {"description": description},
"transmissions": [
{
"frequency": frequency,
"modulation": "RTTY",
"mode": "USB",
"encoding": "ASCII-8",
"parity": "none",
"stop": 2,
"shift": 350,
"baud": 50,
"description": "DUMMY ENTRY, DATA IS VIA radiosonde_auto_rx",
}
],
"sentences": [
{
"protocol": "UKHAS",
"callsign": serial,
"checksum": "crc16-ccitt",
"fields": [
{"name": "sentence_id", "sensor": "base.ascii_int"},
{"name": "time", "sensor": "stdtelem.time"},
{
"name": "latitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd",
},
{
"name": "longitude",
"sensor": "stdtelem.coordinate",
"format": "dd.dddd",
},
{"name": "altitude", "sensor": "base.ascii_int"},
{"name": "speed", "sensor": "base.ascii_float"},
{"name": "temperature_external", "sensor": "base.ascii_float"},
{"name": "humidity", "sensor": "base.ascii_float"},
{"name": "comment", "sensor": "base.string"},
],
"filters": {
"post": [
{"filter": "common.invalid_location_zero", "type": "normal"}
]
},
"description": "radiosonde_auto_rx to Habitat Bridge",
}
],
}
# Perform the POST request to the Habitat DB.
try:
_r = requests.post(url_habitat_db, json=payload_data, timeout=timeout)
if _r.json()["ok"] is True:
logging.info("Habitat - Created a payload document for %s" % serial)
return True
else:
logging.error(
"Habitat - Failed to create a payload document for %s" % serial
)
return False
except Exception as e:
logging.error(
"Habitat - Failed to create a payload document for %s - %s"
% (serial, str(e))
)
return False
def postListenerData(doc, timeout=10):
global uuids, url_habitat_db
# do we have at least one uuid, if not go get more
if len(uuids) < 1:
fetchUuids()
# Attempt to add UUID and time data to document.
try:
doc["_id"] = uuids.pop()
except IndexError:
logging.error("Habitat - Unable to post listener data - no UUIDs available.")
return False
doc["time_uploaded"] = ISOStringNow()
try:
_r = requests.post(url_habitat_db, json=doc, timeout=timeout)
return True
except Exception as e:
logging.error("Habitat - Could not post listener data - %s" % str(e))
return False
def fetchUuids(timeout=10):
global uuids, url_habitat_uuids
_retries = 5
while _retries > 0:
try:
_r = requests.get(url_habitat_uuids % 10, timeout=timeout)
uuids.extend(_r.json()["uuids"])
# logging.debug("Habitat - Got UUIDs")
return
except Exception as e:
logging.error(
"Habitat - Unable to fetch UUIDs, retrying in 10 seconds - %s" % str(e)
)
time.sleep(10)
_retries = _retries - 1
continue
logging.error("Habitat - Gave up trying to get UUIDs.")
return
def initListenerCallsign(callsign, version="", antenna=""):
doc = {
"type": "listener_information",
"time_created": ISOStringNow(),
"data": {
"callsign": callsign,
"antenna": antenna,
"radio": "radiosonde_auto_rx %s" % version,
},
}
resp = postListenerData(doc)
if resp is True:
# logging.debug("Habitat - Listener Callsign Initialized.")
return True
else:
logging.error("Habitat - Unable to initialize callsign.")
return False
def uploadListenerPosition(callsign, lat, lon, version="", antenna=""):
""" Initializer Listener Callsign, and upload Listener Position """
# Attempt to initialize the listeners callsign
resp = initListenerCallsign(callsign, version=version, antenna=antenna)
# If this fails, it means we can't contact the Habitat server,
# so there is no point continuing.
if resp is False:
return False
doc = {
"type": "listener_telemetry",
"time_created": ISOStringNow(),
"data": {
"callsign": callsign,
"chase": False,
"latitude": lat,
"longitude": lon,
"altitude": 0,
"speed": 0,
},
}
# post position to habitat
resp = postListenerData(doc)
if resp is True:
logging.info("Habitat - Station position uploaded.")
return True
else:
logging.error("Habitat - Unable to upload station position.")
return False
#
# Habitat Uploader Class
#
class HabitatUploader(object):
"""
Queued Habitat Telemetry Uploader class
This performs uploads to the Habitat servers, and also handles generation of flight documents.
Incoming telemetry packets are fed into a queue, which is checked regularly.
If a new callsign is sighted, a payload document is created in the Habitat DB.
The telemetry data is then converted into a UKHAS-compatible format, before being added to a queue to be
uploaded as network speed permits.
If an upload attempt times out, the packet is discarded.
If the queue fills up (probably indicating no network connection, and a fast packet downlink rate),
it is immediately emptied, to avoid upload of out-of-date packets.
Note that this uploader object is intended to handle telemetry from multiple sondes.
"""
# We require the following fields to be present in the incoming telemetry dictionary data
REQUIRED_FIELDS = [
"frame",
"id",
"datetime",
"lat",
"lon",
"alt",
"temp",
"type",
"freq",
"freq_float",
"datetime_dt",
]
def __init__(
self,
user_callsign="N0CALL",
station_position=(0.0, 0.0, 0.0),
user_antenna="",
synchronous_upload_time=30,
callsign_validity_threshold=2,
upload_queue_size=16,
upload_timeout=10,
upload_retries=5,
upload_retry_interval=0.25,
user_position_update_rate=6,
inhibit=False,
url="http://habitat.sondehub.org/",
):
""" Initialise a Habitat Uploader object.
Args:
user_callsign (str): Callsign of the uploader.
station_position (tuple): Optional - a tuple consisting of (lat, lon, alt), which if populated,
is used to plot the listener's position on the Habitat map, both when this class is initialised, and
when a new sonde ID is observed.
synchronous_upload_time (int): Upload the most recent telemetry when time.time()%synchronous_upload_time == 0
This is done in an attempt to get multiple stations uploading the same telemetry sentence simultaneously,
and also acts as decimation on the number of sentences uploaded to Habitat.
callsign_validity_threshold (int): Only upload telemetry data if the callsign has been observed more than N times. Default = 2
upload_queue_size (int): Maximum number of sentences to keep in the upload queue. If the queue is filled,
it will be emptied (discarding the queue contents).
upload_timeout (int): Timeout (seconds) when performing uploads to Habitat. Default: 10 seconds.
upload_retries (int): Retry an upload up to this many times. Default: 5
upload_retry_interval (int): Time interval between upload retries. Default: 0.25 seconds.
user_position_update_rate (int): Time interval between automatic station position updates, in hours.
Set to 6 hours by default; updating any more often than this is not really useful.
inhibit (bool): Inhibit all uploads. Mainly intended for debugging.
"""
self.user_callsign = user_callsign
self.station_position = station_position
self.user_antenna = user_antenna
self.upload_timeout = upload_timeout
self.upload_retries = upload_retries
self.upload_retry_interval = upload_retry_interval
self.upload_queue_size = upload_queue_size
self.synchronous_upload_time = synchronous_upload_time
self.callsign_validity_threshold = callsign_validity_threshold
self.inhibit = inhibit
self.user_position_update_rate = user_position_update_rate
# set the habitat upload url
global url_habitat_uuids, url_habitat_db, habitat_url
url_habitat_uuids = url + "_uuids?count=%d"
url_habitat_db = url + "habitat/"
habitat_url = url
# Our two Queues - one to hold sentences to be uploaded, the other to temporarily hold
# input telemetry dictionaries before they are converted and processed.
self.habitat_upload_queue = Queue(upload_queue_size)
self.input_queue = Queue()
# Dictionary where we store sorted telemetry data for upload when required.
# Elements will be named after payload IDs, and will contain:
# 'count' (int): Number of times this callsign has been observed. Uploads will only occur when
# this number rises above callsign_validity_threshold.
# 'data' (Queue): A queue of telemetry sentences to be uploaded. When the upload timer fires,
# this queue will be dumped, and the most recent telemetry uploaded.
# 'habitat_document' (bool): Indicates if a habitat document has been created for this payload ID.
# 'listener_updated' (bool): Indicates if the listener position has been updated for the start of this ID's flight.
self.observed_payloads = {}
# Record of when we last uploaded a user station position to Habitat.
self.last_user_position_upload = 0
# Lock for dealing with telemetry uploads.
self.upload_lock = Lock()
# Start the uploader thread.
self.upload_thread_running = True
self.upload_thread = Thread(target=self.habitat_upload_thread)
self.upload_thread.start()
# Start the input queue processing thread.
self.input_processing_running = True
self.input_thread = Thread(target=self.process_queue)
self.input_thread.start()
self.timer_thread_running = True
self.timer_thread = Thread(target=self.upload_timer)
self.timer_thread.start()
def user_position_upload(self):
""" Upload the the station position to Habitat. """
if self.station_position == None:
# Upload is successful, just flag it as OK and move on.
self.last_user_position_upload = time.time()
return False
if (self.station_position[0] != 0.0) or (self.station_position[1] != 0.0):
_success = uploadListenerPosition(
self.user_callsign,
self.station_position[0],
self.station_position[1],
version=auto_rx_version,
antenna=self.user_antenna,
)
self.last_user_position_upload = time.time()
return _success
else:
# No position set, just flag the update as successful.
self.last_user_position_upload = time.time()
return False
def habitat_upload(self, sentence):
""" Upload a UKHAS-standard telemetry sentence to Habitat
Args:
sentence (str): The UKHAS-standard telemetry sentence to upload.
"""
if self.inhibit:
self.log_info("Upload inhibited.")
return
# Generate payload to be uploaded
_sentence_b64 = b64encode(
sentence.encode("ascii")
) # Encode to ASCII to be able to perform B64 encoding...
_date = datetime.datetime.utcnow().isoformat("T") + "Z"
_user_call = self.user_callsign
_data = {
"type": "payload_telemetry",
"data": {
"_raw": _sentence_b64.decode(
"ascii"
) # ... but decode back to a string to enable JSON serialisation.
},
"receivers": {
_user_call: {"time_created": _date, "time_uploaded": _date,},
},
}
# The URL to upload to.
_url = (
habitat_url
+ "habitat/_design/payload_telemetry/_update/add_listener/%s"
% sha256(_sentence_b64).hexdigest()
)
# Delay for a random amount of time between 0 and upload_retry_interval*2 seconds.
time.sleep(random.random() * self.upload_retry_interval * 2.0)
_retries = 0
# When uploading, we have three possible outcomes:
# - Can't connect. No point immediately re-trying in this situation.
# - The packet is uploaded successfully (201 / 403)
# - There is an upload conflict on the Habitat DB end (409). We can retry and it might work.
while _retries < self.upload_retries:
# Run the request.
try:
headers = {"User-Agent": "autorx-" + auto_rx_version}
_req = requests.put(
_url,
data=json.dumps(_data),
timeout=(self.upload_timeout, 6.1),
headers=headers,
)
except Exception as e:
self.log_error("Upload Failed: %s" % str(e))
return
if _req.status_code == 201 or _req.status_code == 403:
# 201 = Success, 403 = Success, sentence has already been seen by others.
self.log_info(
"Uploaded sentence to Habitat successfully: %s" % sentence.strip()
)
_upload_success = True
break
elif _req.status_code == 409:
# 409 = Upload conflict (server busy). Sleep for a moment, then retry.
self.log_debug("Upload conflict.. retrying.")
time.sleep(random.random() * self.upload_retry_interval)
_retries += 1
else:
self.log_error(
"Error uploading to Habitat. Status Code: %d %s."
% (_req.status_code, _req.text)
)
break
if _retries == self.upload_retries:
self.log_error(
"Upload conflict not resolved with %d retries." % self.upload_retries
)
return
def habitat_upload_thread(self):
""" Handle uploading of packets to Habitat """
self.log_debug("Started Habitat Uploader Thread.")
while self.upload_thread_running:
if self.habitat_upload_queue.qsize() > 0:
# If the queue is completely full, jump to the most recent telemetry sentence.
if self.habitat_upload_queue.qsize() == self.upload_queue_size:
while not self.habitat_upload_queue.empty():
try:
sentence = self.habitat_upload_queue.get_nowait()
except:
pass
self.log_warning(
"Upload queue was full when reading from queue, now flushed - possible connectivity issue."
)
else:
# Otherwise, get the first item in the queue.
sentence = self.habitat_upload_queue.get()
# Attempt to upload it.
if sentence:
self.habitat_upload(sentence)
else:
# Wait for a short time before checking the queue again.
time.sleep(0.1)
self.log_debug("Stopped Habitat Uploader Thread.")
def handle_telem_dict(self, telem, immediate=False):
# Try and convert it to a UKHAS sentence
try:
_sentence = sonde_telemetry_to_sentence(telem)
except Exception as e:
self.log_error("Error converting telemetry to sentence - %s" % str(e))
return
_callsign = "RS_" + telem["id"]
# Wait for the upload_lock to be available, to ensure we don't end up with
# race conditions resulting in multiple payload docs being created.
self.upload_lock.acquire()
# Habitat Payload document creation has been disabled as of 2020-03-20.
# We now use a common payload document for all radiosonde telemetry.
#
# # Create a habitat document if one does not already exist:
# if not self.observed_payloads[telem['id']]['habitat_document']:
# # Check if there has already been telemetry from this ID observed on Habhub
# _document_exists = check_callsign(_callsign)
# # If so, we don't need to create a new document
# if _document_exists:
# self.observed_payloads[telem['id']]['habitat_document'] = True
# else:
# # Otherwise, we attempt to create a new document.
# if self.inhibit:
# # If we have an upload inhibit, don't create a payload doc.
# _created = True
# else:
# _created = initPayloadDoc(_callsign, description="Meteorology Radiosonde", frequency=telem['freq_float'])
# if _created:
# self.observed_payloads[telem['id']]['habitat_document'] = True
# else:
# self.log_error("Error creating payload document!")
# self.upload_lock.release()
# return
if immediate:
self.log_info(
"Performing immediate upload for first telemetry sentence of %s."
% telem["id"]
)
self.habitat_upload(_sentence)
else:
# Attempt to add it to the habitat uploader queue.
try:
if self.habitat_upload_queue.qsize() == self.upload_queue_size:
# Flush queue.
while not self.habitat_upload_queue.empty():
try:
self.habitat_upload_queue.get_nowait()
except:
pass
self.log_error(
"Upload queue was full when adding to queue, now flushed - possible connectivity issue."
)
self.habitat_upload_queue.put_nowait(_sentence)
self.log_debug(
"Upload queue size: %d" % self.habitat_upload_queue.qsize()
)
except Exception as e:
self.log_error(
"Error adding sentence to queue, queue likely full. %s" % str(e)
)
self.log_error("Queue Size: %d" % self.habitat_upload_queue.qsize())
self.upload_lock.release()
def upload_timer(self):
""" Add packets to the habitat upload queue if it is time for us to upload. """
while self.timer_thread_running:
if int(time.time()) % self.synchronous_upload_time == 0:
# Time to upload!
for _id in self.observed_payloads.keys():
# If no data, continue...
if self.observed_payloads[_id]["data"].empty():
continue
else:
# Otherwise, dump the queue and keep the latest telemetry.
while not self.observed_payloads[_id]["data"].empty():
_telem = self.observed_payloads[_id]["data"].get()
self.handle_telem_dict(_telem)
# Sleep a second so we don't hit the synchronous upload time again.
time.sleep(1)
else:
# Not yet time to upload, wait for a bit.
time.sleep(0.1)
def process_queue(self):
""" Process packets from the input queue.
This thread handles packets from the input queue (provided by the decoders)
Packets are sorted by ID, and a dictionary entry is created.
"""
while self.input_processing_running:
# Process everything in the queue.
while self.input_queue.qsize() > 0:
# Grab latest telem dictionary.
_telem = self.input_queue.get_nowait()
_id = _telem["id"]
if _id not in self.observed_payloads:
# We haven't seen this ID before, so create a new dictionary entry for it.
self.observed_payloads[_id] = {
"count": 1,
"data": Queue(),
"habitat_document": False,
"first_uploaded": False,
}
self.log_debug(
"New Payload %s. Not observed enough to allow upload." % _id
)
# However, we don't yet add anything to the queue for this payload...
else:
# We have seen this payload before!
# Increment the 'seen' counter.
self.observed_payloads[_id]["count"] += 1
# If we have seen this particular ID enough times, add the data to the ID's queue.
if (
self.observed_payloads[_id]["count"]
>= self.callsign_validity_threshold
):
# If this is the first time we have observed this payload, immediately upload the first position we got.
if self.observed_payloads[_id]["first_uploaded"] == False:
# Because receiving balloon telemetry appears to be a competition, immediately upload the
# first valid position received.
self.handle_telem_dict(_telem, immediate=True)
self.observed_payloads[_id]["first_uploaded"] = True
else:
# Otherwise, add the telemetry to the upload queue
self.observed_payloads[_id]["data"].put(_telem)
else:
self.log_debug(
"Payload ID %s not observed enough to allow upload." % _id
)
# If we haven't uploaded our station position recently, re-upload it.
if (
time.time() - self.last_user_position_upload
) > self.user_position_update_rate * 3600:
self.user_position_upload()
time.sleep(0.1)
def add(self, telemetry):
""" Add a dictionary of telemetry to the input queue.
Args:
telemetry (dict): Telemetry dictionary to add to the input queue.
"""
# Discard any telemetry which is indicated to be encrypted.
if "encrypted" in telemetry:
if telemetry["encrypted"] == True:
return
# Check the telemetry dictionary contains the required fields.
for _field in self.REQUIRED_FIELDS:
if _field not in telemetry:
self.log_error("JSON object missing required field %s" % _field)
return
# Add it to the queue if we are running.
if self.input_processing_running:
self.input_queue.put(telemetry)
else:
self.log_error("Processing not running, discarding.")
def update_station_position(self, lat, lon, alt):
""" Update the internal station position record. Used when determining the station position by GPSD """
self.station_position = (lat, lon, alt)
def close(self):
""" Shutdown uploader and processing threads. """
self.log_debug("Waiting for threads to close...")
self.input_processing_running = False
self.timer_thread_running = False
self.upload_thread_running = False
# Wait for all threads to close.
if self.upload_thread is not None:
self.upload_thread.join(60)
if self.upload_thread.is_alive():
self.log_error("habitat upload thread failed to join")
if self.timer_thread is not None:
self.timer_thread.join(60)
if self.timer_thread.is_alive():
self.log_error("habitat timer thread failed to join")
if self.input_thread is not None:
self.input_thread.join(60)
if self.input_thread.is_alive():
self.log_error("habitat input thread failed to join")
def log_debug(self, line):
""" Helper function to log a debug message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.debug("Habitat - %s" % line)
def log_info(self, line):
""" Helper function to log an informational message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.info("Habitat - %s" % line)
def log_error(self, line):
""" Helper function to log an error message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.error("Habitat - %s" % line)
def log_warning(self, line):
""" Helper function to log a warning message with a descriptive heading.
Args:
line (str): Message to be logged.
"""
logging.warning("Habitat - %s" % line)

View file

@@ -14,6 +14,7 @@ import logging
import os.path
import time
import zipfile
import xml.etree.ElementTree as ET
import numpy as np
@@ -521,6 +522,126 @@ def zip_log_files(serial_list=None):
return data
def coordinates_to_kml_placemark(lat, lon, alt,
name="Placemark Name",
description="Placemark Description",
absolute=False,
icon="https://maps.google.com/mapfiles/kml/shapes/placemark_circle.png",
scale=1.0):
""" Generate a generic placemark object """
placemark = ET.Element("Placemark")
pm_name = ET.SubElement(placemark, "name")
pm_name.text = name
pm_desc = ET.SubElement(placemark, "description")
pm_desc.text = description
style = ET.SubElement(placemark, "Style")
icon_style = ET.SubElement(style, "IconStyle")
icon_scale = ET.SubElement(icon_style, "scale")
icon_scale.text = str(scale)
pm_icon = ET.SubElement(icon_style, "Icon")
href = ET.SubElement(pm_icon, "href")
href.text = icon
point = ET.SubElement(placemark, "Point")
if absolute:
altitude_mode = ET.SubElement(point, "altitudeMode")
altitude_mode.text = "absolute"
coordinates = ET.SubElement(point, "coordinates")
coordinates.text = f"{lon:.6f},{lat:.6f},{alt:.6f}"
return placemark
def path_to_kml_placemark(flight_path,
name="Flight Path Name",
track_color="ff03bafc",
poly_color="8003bafc",
track_width=2.0,
absolute=True,
extrude=True):
''' Produce a placemark object from a flight path array '''
placemark = ET.Element("Placemark")
pm_name = ET.SubElement(placemark, "name")
pm_name.text = name
style = ET.SubElement(placemark, "Style")
line_style = ET.SubElement(style, "LineStyle")
color = ET.SubElement(line_style, "color")
color.text = track_color
width = ET.SubElement(line_style, "width")
width.text = str(track_width)
if extrude:
poly_style = ET.SubElement(style, "PolyStyle")
color = ET.SubElement(poly_style, "color")
color.text = poly_color
fill = ET.SubElement(poly_style, "fill")
fill.text = "1"
outline = ET.SubElement(poly_style, "outline")
outline.text = "1"
line_string = ET.SubElement(placemark, "LineString")
if absolute:
if extrude:
ls_extrude = ET.SubElement(line_string, "extrude")
ls_extrude.text = "1"
altitude_mode = ET.SubElement(line_string, "altitudeMode")
altitude_mode.text = "absolute"
else:
ls_tessellate = ET.SubElement(line_string, "tessellate")
ls_tessellate.text = "1"
coordinates = ET.SubElement(line_string, "coordinates")
coordinates.text = " ".join(f"{lon:.6f},{lat:.6f},{alt:.6f}" for lat, lon, alt in flight_path)
return placemark
def _log_file_to_kml_folder(filename, absolute=True, extrude=True, last_only=False):
''' Convert a single sonde log file to a KML Folder object '''
# Read file.
_flight_data = read_log_file(filename)
_flight_serial = _flight_data["serial"]
_landing_time = _flight_data["last_time"]
_landing_pos = _flight_data["path"][-1]
_folder = ET.Element("Folder")
_name = ET.SubElement(_folder, "name")
_name.text = _flight_serial
# Generate the placemark & flight track.
_folder.append(coordinates_to_kml_placemark(_landing_pos[0], _landing_pos[1], _landing_pos[2],
name=_flight_serial, description=_landing_time, absolute=absolute))
if not last_only:
_folder.append(path_to_kml_placemark(_flight_data["path"], name="Track",
absolute=absolute, extrude=extrude))
return _folder
def log_files_to_kml(file_list, kml_file, absolute=True, extrude=True, last_only=False):
""" Convert a collection of log files to a KML file """
kml_root = ET.Element("kml", xmlns="http://www.opengis.net/kml/2.2")
kml_doc = ET.SubElement(kml_root, "Document")
for file in file_list:
logging.debug(f"Converting {file} to KML")
try:
kml_doc.append(_log_file_to_kml_folder(file, absolute=absolute,
extrude=extrude, last_only=last_only))
except Exception:
logging.exception(f"Failed to convert {file} to KML")
tree = ET.ElementTree(kml_root)
tree.write(kml_file, encoding="UTF-8", xml_declaration=True)
if __name__ == "__main__":
import sys
import json
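A minimal usage sketch for the new log_files_to_kml() helper, assuming some *_sonde.log files exist under ./log (the paths are illustrative, not part of the diff):

# Convert all sonde log files in a directory into a single KML file.
import glob
from autorx.log_files import log_files_to_kml

log_files = sorted(glob.glob("./log/*_sonde.log"), reverse=True)
# tree.write(..., encoding="UTF-8") emits bytes, so the output is opened in binary mode.
with open("sondes.kml", "wb") as kml_file:
    log_files_to_kml(log_files, kml_file, absolute=True, extrude=True, last_only=False)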

View file

@@ -181,6 +181,7 @@
$("#showsonde-skew").prop('disabled', true);
$("#hidesonde-skew").prop('disabled', true);
$("#download-logs").prop('disabled', true);
$("#generate-kml").prop('disabled', true);
}
async function enableMenu () {
@@ -194,6 +195,7 @@
$("#showsonde-skew").prop('disabled', false);
$("#hidesonde-skew").prop('disabled', false);
$("#download-logs").prop('disabled', false);
$("#generate-kml").prop('disabled', false);
}
if ((window.innerWidth/window.innerHeight) < 1) {
@@ -975,6 +977,41 @@
downloadLogs();
});
function generateKML() {
// Generate a KML file from a set of log files.
selectedrows = table.getSelectedData();
if (selectedrows.length > 0) {
// Create the list of log files.
_serial_list = [];
for (let i = 0; i < selectedrows.length; i++){
_serial_list.push(selectedrows[i]['serial']);
}
if(_serial_list.length>50){
if (confirm("Warning - downloading lots of logs may take some time. Are you sure?")) {
// Just continue on.
} else {
return;
}
}
if(_serial_list.length == table.getData().length){
// Request all log files
window.open("generate_kml" , '_blank');
}else {
// Just request the selected ones.
// Convert the list to JSON, and then to base64
b64 = btoa(JSON.stringify(_serial_list));
// Make the request in a new tab
window.open("generate_kml/"+b64 , '_blank');
}
}
}
$("#generate-kml").click(function(){
generateKML();
});
// List of available map layers.
var Mapnik = L.tileLayer.provider("OpenStreetMap.Mapnik", {edgeBufferTiles: 2});
var DarkMatter = L.tileLayer.provider("CartoDB.DarkMatter", {edgeBufferTiles: 2});
@@ -1472,6 +1509,7 @@
<button id="showsonde-coverage" style="margin:3px auto">Plot Coverage</button>
<button id="showsonde-skew" style="margin:3px auto">Plot Skew-T</button>
<button id="download-logs" style="margin:3px auto">Download Logs</button>
<button id="generate-kml" style="margin:3px auto">Generate KML</button>
</div>
<br>
</div>

View file

@@ -689,17 +689,6 @@
sonde_id_data.vel_h = (sonde_id_data.vel_h*3.6).toFixed(1);
// Add a link to HabHub if we have habitat enabled.
// if (autorx_config.sondehub_enabled == true) {
// sonde_id_data.id = "<a href='http://sondehub.org/" + sonde_id.replace(/^(DFM|M10|M20|IMET|IMET54|MRZ)-/,"") + "' target='_blank'>" + sonde_id + "</a>";
// // These links are only going to work for Vaisala radiosondes since the APRS callsign is never passed through to the web interface,
// // and the APRS callsigns for everything other than RS41s and RS92s is different to the 'full' serials
// } else if (autorx_config.aprs_enabled == true && autorx_config.aprs_server == "radiosondy.info") {
// sonde_id_data.id = "<a href='https://radiosondy.info/sonde_archive.php?sondenumber=" + sonde_id + "' target='_blank'>" + sonde_id + "</a>";
// } else if (autorx_config.aprs_enabled == true) {
// sonde_id_data.id = "<a href='https://aprs.fi/#!call=" + sonde_id + "&timerange=3600&tail=3600' target='_blank'>" + sonde_id + "</a>";
// }
sonde_id_data.realid = sonde_id;
// Add SNR data, if it exists.

View file

@@ -8,19 +8,30 @@
import base64
import copy
import datetime
import glob
import io
import json
import logging
import os
import random
import requests
import time
import traceback
import sys
import xml.etree.ElementTree as ET
import autorx
import autorx.config
import autorx.scan
from autorx.geometry import GenericTrack
from autorx.utils import check_autorx_versions
from autorx.log_files import list_log_files, read_log_by_serial, zip_log_files
from autorx.log_files import (
list_log_files,
read_log_by_serial,
zip_log_files,
log_files_to_kml,
coordinates_to_kml_placemark,
path_to_kml_placemark
)
from autorx.decode import SondeDecoder
from queue import Queue
from threading import Thread
@@ -28,15 +39,6 @@ import flask
from flask import request, abort, make_response, send_file
from flask_socketio import SocketIO
from werkzeug.middleware.proxy_fix import ProxyFix
import re
try:
from simplekml import Kml, AltitudeMode
except ImportError:
print(
"Could not import simplekml! Try running: sudo pip3 install -r requirements.txt"
)
sys.exit(1)
# Inhibit Flask warning message about running a development server... (we know!)
@@ -146,47 +148,60 @@ def flask_get_task_list():
def flask_get_kml():
""" Return KML with autorefresh """
_config = autorx.config.global_config
kml = Kml()
netlink = kml.newnetworklink(name="Radiosonde Auto-RX Live Telemetry")
netlink.open = 1
netlink.link.href = flask.request.url_root + "rs_feed.kml"
try:
netlink.link.refreshinterval = _config["kml_refresh_rate"]
except KeyError:
netlink.link.refreshinterval = 10
netlink.link.refreshmode = "onInterval"
return kml.kml(), 200, {"content-type": "application/vnd.google-earth.kml+xml"}
kml_root = ET.Element("kml", xmlns="http://www.opengis.net/kml/2.2")
kml_doc = ET.SubElement(kml_root, "Document")
network_link = ET.SubElement(kml_doc, "NetworkLink")
name = ET.SubElement(network_link, "name")
name.text = "Radiosonde Auto-RX Live Telemetry"
open = ET.SubElement(network_link, "open")
open.text = "1"
link = ET.SubElement(network_link, "Link")
href = ET.SubElement(link, "href")
href.text = flask.request.url_root + "rs_feed.kml"
refresh_mode = ET.SubElement(link, "refreshMode")
refresh_mode.text = "onInterval"
refresh_interval = ET.SubElement(link, "refreshInterval")
refresh_interval.text = str(autorx.config.global_config["kml_refresh_rate"])
kml_string = ET.tostring(kml_root, encoding="UTF-8", xml_declaration=True)
return kml_string, 200, {"content-type": "application/vnd.google-earth.kml+xml"}
@app.route("/rs_feed.kml")
def flask_get_kml_feed():
""" Return KML with RS telemetry """
kml = Kml()
kml.resetidcounter()
kml.document.name = "Track"
kml.document.open = 1
kml_root = ET.Element("kml", xmlns="http://www.opengis.net/kml/2.2")
kml_doc = ET.SubElement(kml_root, "Document")
name = ET.SubElement(kml_doc, "name")
name.text = "Track"
open = ET.SubElement(kml_doc, "open")
open.text = "1"
# Station Placemark
pnt = kml.newpoint(
name="Ground Station",
altitudemode=AltitudeMode.absolute,
kml_doc.append(coordinates_to_kml_placemark(
autorx.config.global_config["station_lat"],
autorx.config.global_config["station_lon"],
autorx.config.global_config["station_alt"],
name=autorx.config.global_config["habitat_uploader_callsign"],
description="AutoRX Ground Station",
)
pnt.open = 1
pnt.iconstyle.icon.href = flask.request.url_root + "static/img/antenna-green.png"
pnt.coords = [
(
autorx.config.global_config["station_lon"],
autorx.config.global_config["station_lat"],
autorx.config.global_config["station_alt"],
)
]
absolute=True,
icon=flask.request.url_root + "static/img/antenna-green.png"
))
for rs_id in flask_telemetry_store:
try:
coordinates = []
for tp in flask_telemetry_store[rs_id]["track"].track_history:
coordinates.append((tp[2], tp[1], tp[3]))
coordinates.append((tp[1], tp[2], tp[3]))
rs_data = """\
{type}/{subtype}
@@ -205,56 +220,59 @@ def flask_get_kml_feed():
icon = flask.request.url_root + "static/img/parachute-green.png"
# Add folder
fol = kml.newfolder(name=rs_id)
folder = ET.SubElement(kml_doc, "Folder", id=f"folder_{rs_id}")
name = ET.SubElement(folder, "name")
name.text = rs_id
open = ET.SubElement(folder, "open")
open.text = "1"
# HAB Placemark
pnt = fol.newpoint(
folder.append(coordinates_to_kml_placemark(
flask_telemetry_store[rs_id]["latest_telem"]["lat"],
flask_telemetry_store[rs_id]["latest_telem"]["lon"],
flask_telemetry_store[rs_id]["latest_telem"]["alt"],
name=rs_id,
altitudemode=AltitudeMode.absolute,
description=rs_data.format(
**flask_telemetry_store[rs_id]["latest_telem"]
),
)
pnt.iconstyle.icon.href = icon
pnt.coords = [
description=rs_data.format(**flask_telemetry_store[rs_id]["latest_telem"]),
absolute=True,
icon=icon
))
# Track
folder.append(path_to_kml_placemark(
coordinates,
name="Track",
absolute=True,
extrude=True
))
# LOS line
coordinates = [
(
flask_telemetry_store[rs_id]["latest_telem"]["lon"],
flask_telemetry_store[rs_id]["latest_telem"]["lat"],
flask_telemetry_store[rs_id]["latest_telem"]["alt"],
)
]
linestring = fol.newlinestring(name="Track")
linestring.coords = coordinates
linestring.altitudemode = AltitudeMode.absolute
linestring.extrude = 1
linestring.stylemap.normalstyle.linestyle.color = "ff03bafc"
linestring.stylemap.highlightstyle.linestyle.color = "ff03bafc"
linestring.stylemap.normalstyle.polystyle.color = "AA03bafc"
linestring.stylemap.highlightstyle.polystyle.color = "CC03bafc"
# Add LOS line
linestring = fol.newlinestring(name="LOS")
linestring.altitudemode = AltitudeMode.absolute
linestring.coords = [
(
autorx.config.global_config["station_lon"],
autorx.config.global_config["station_lat"],
autorx.config.global_config["station_lon"],
autorx.config.global_config["station_alt"],
),
(
flask_telemetry_store[rs_id]["latest_telem"]["lon"],
flask_telemetry_store[rs_id]["latest_telem"]["lat"],
flask_telemetry_store[rs_id]["latest_telem"]["lon"],
flask_telemetry_store[rs_id]["latest_telem"]["alt"],
),
]
folder.append(path_to_kml_placemark(
coordinates,
name="LOS",
track_color="ffffffff",
absolute=True,
extrude=False
))
except Exception as e:
logging.error(
"KML - Could not parse data from RS %s - %s" % (rs_id, str(e))
)
return (
re.sub("<Document.*>", "<Document>", kml.kml()),
200,
{"content-type": "application/vnd.google-earth.kml+xml"},
)
kml_string = ET.tostring(kml_root, encoding="UTF-8", xml_declaration=True)
return kml_string, 200, {"content-type": "application/vnd.google-earth.kml+xml"}
@app.route("/get_config")
@@ -369,6 +387,55 @@ def flask_export_log_files(serialb64=None):
logging.error("Web - Error handling Zip request:" + str(e))
abort(400)
@app.route("/generate_kml")
@app.route("/generate_kml/<serialb64>")
def flask_generate_kml(serialb64=None):
"""
Generate a KML file from a set of log files.
The list of log files is provided in the URL as a base64-encoded JSON list.
"""
try:
if serialb64:
_serial_list = json.loads(base64.b64decode(serialb64))
_log_files = []
for _serial in _serial_list:
_log_mask = os.path.join(autorx.logging_path, f"*_*{_serial}_*_sonde.log")
_matching_files = glob.glob(_log_mask)
if len(_matching_files) >= 1:
_log_files.append(_matching_files[0])
else:
_log_mask = os.path.join(autorx.logging_path, "*_sonde.log")
_log_files = glob.glob(_log_mask)
_kml_file = io.BytesIO()
_log_files.sort(reverse=True)
log_files_to_kml(_log_files, _kml_file)
_kml_file.seek(0)
_ts = datetime.datetime.strftime(datetime.datetime.utcnow(), "%Y%m%d-%H%M%SZ")
response = make_response(
flask.send_file(
_kml_file,
mimetype="application/vnd.google-earth.kml+xml",
as_attachment=True,
download_name=f"autorx_logfiles_{autorx.config.global_config['habitat_uploader_callsign']}_{_ts}.kml",
)
)
# Add header asking client not to cache the download
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Pragma"] = "no-cache"
return response
except Exception as e:
logging.error("Web - Error handling KML request:" + str(e))
abort(400)
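A hedged sketch of calling the new endpoint from a script, assuming the auto_rx web interface is reachable on localhost port 5000 (the port is configurable) and using hypothetical serial numbers:

# Request a KML for a selected set of serials via /generate_kml/<serialb64>.
import base64
import json
import requests

serials = ["T1234567", "S2345678"]  # hypothetical sonde serial numbers
serialb64 = base64.b64encode(json.dumps(serials).encode("ascii")).decode("ascii")
response = requests.get(f"http://localhost:5000/generate_kml/{serialb64}", timeout=60)
response.raise_for_status()
with open("autorx_selected.kml", "wb") as kml_file:
    kml_file.write(response.content)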
#
# Control Endpoints.
#

View file

@@ -1,9 +1,7 @@
crcmod
python-dateutil
flask
flask-socketio
numpy
requests
semver
simplekml
simple-websocket

View file

@@ -48,10 +48,7 @@ for _line in _f:
_fest2.append(_data['f2_est'])
_ppm.append(_data['ppm'])
if _time == []:
_time = [0]
else:
_time.append(_time[-1]+1.0/_sps)
_time.append(_data['samples'])
_ebno_max = pd.Series(_ebno).rolling(10).max().dropna().tolist()
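Since the x-axis now comes straight from the demodulator's cumulative sample count, converting it back to elapsed seconds is a single division by the sample rate the script already holds in _sps (a sketch, not part of the commit):

# Convert cumulative sample counts into elapsed seconds for plotting.
_time_seconds = [_samples / float(_sps) for _samples in _time]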

View file

@@ -5,275 +5,37 @@
#
# 2018-02 Mark Jessop <vk5qi@rfhead.net>
#
# Note: This utility requires the fastkml and shapely libraries, which can be installed using:
# sudo pip install fastkml shapely
#
import sys
import time
import datetime
import traceback
import argparse
import glob
import os
import fastkml
from dateutil.parser import parse
from shapely.geometry import Point, LineString
import sys
from os.path import dirname, abspath
def read_telemetry_csv(filename,
datetime_field = 0,
latitude_field = 3,
longitude_field = 4,
altitude_field = 5,
delimiter=','):
'''
Read in a radiosonde_auto_rx generated telemetry CSV file.
Fields to use can be set as arguments to this function.
These have output like the following:
2017-12-27T23:21:59.560,M2913374,982,-34.95143,138.52471,719.9,-273.0,RS92,401.520
<datetime>,<serial>,<frame_no>,<lat>,<lon>,<alt>,<temp>,<sonde_type>,<freq>
Note that the datetime field must be parsable by dateutil.parsers.parse.
If any fields are missing, or invalid, this function will return None.
The output data structure is in the form:
[
[datetime (as a datetime object), latitude, longitude, altitude, raw_line],
[datetime (as a datetime object), latitude, longitude, altitude, raw_line],
...
]
'''
output = []
f = open(filename,'r')
for line in f:
try:
# Split line by comma delimiters.
_fields = line.split(delimiter)
if _fields[0] == 'timestamp':
# First line in file - header line.
continue
# Attempt to parse fields.
_datetime = parse(_fields[datetime_field])
_latitude = float(_fields[latitude_field])
_longitude = float(_fields[longitude_field])
_altitude = float(_fields[altitude_field])
output.append([_datetime, _latitude, _longitude, _altitude, line])
except:
traceback.print_exc()
return None
f.close()
return output
def flight_burst_position(flight_path):
''' Search through flight data for the burst position and return it. '''
# Read through array and hunt for max altitude point.
current_alt = 0.0
current_index = 0
for i in range(len(flight_path)):
if flight_path[i][3] > current_alt:
current_alt = flight_path[i][3]
current_index = i
return flight_path[current_index]
ns = '{http://www.opengis.net/kml/2.2}'
def new_placemark(lat, lon, alt,
placemark_id="Placemark ID",
name="Placemark Name",
absolute = False,
icon = "http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png",
scale = 1.0):
""" Generate a generic placemark object """
if absolute:
_alt_mode = 'absolute'
else:
_alt_mode = 'clampToGround'
flight_icon_style = fastkml.styles.IconStyle(
ns=ns,
icon_href=icon,
scale=scale)
flight_style = fastkml.styles.Style(
ns=ns,
styles=[flight_icon_style])
flight_placemark = fastkml.kml.Placemark(
ns=ns,
id=placemark_id,
name=name,
description="",
styles=[flight_style])
flight_placemark.geometry = fastkml.geometry.Geometry(
ns=ns,
geometry=Point(lon, lat, alt),
altitude_mode=_alt_mode)
return flight_placemark
def flight_path_to_geometry(flight_path,
placemark_id="Flight Path ID",
name="Flight Path Name",
track_color="aaffffff",
poly_color="20000000",
track_width=2.0,
absolute = True,
extrude = True,
tessellate = True):
''' Produce a fastkml geometry object from a flight path array '''
# Handle selection of absolute altitude mode
if absolute:
_alt_mode = 'absolute'
else:
_alt_mode = 'clampToGround'
# Convert the flight path array [time, lat, lon, alt, comment] into a LineString object.
track_points = []
for _point in flight_path:
# Flight path array is in lat,lon,alt order, needs to be in lon,lat,alt
track_points.append([_point[2],_point[1],_point[3]])
_flight_geom = LineString(track_points)
# Define the Line and Polygon styles, which are used for the flight path, and the extrusions (if enabled)
flight_track_line_style = fastkml.styles.LineStyle(
ns=ns,
color=track_color,
width=track_width)
flight_extrusion_style = fastkml.styles.PolyStyle(
ns=ns,
color=poly_color)
flight_track_style = fastkml.styles.Style(
ns=ns,
styles=[flight_track_line_style, flight_extrusion_style])
# Generate the Placemark which will contain the track data.
flight_line = fastkml.kml.Placemark(
ns=ns,
id=placemark_id,
name=name,
styles=[flight_track_style])
# Add the track data to the Placemark
flight_line.geometry = fastkml.geometry.Geometry(
ns=ns,
geometry=_flight_geom,
altitude_mode=_alt_mode,
extrude=extrude,
tessellate=tessellate)
return flight_line
def write_kml(geom_objects,
filename="output.kml",
comment=""):
""" Write out flight path geometry objects to a kml file. """
kml_root = fastkml.kml.KML()
kml_doc = fastkml.kml.Document(
ns=ns,
name=comment)
if type(geom_objects) is not list:
geom_objects = [geom_objects]
for _flight in geom_objects:
kml_doc.append(_flight)
with open(filename,'w') as kml_file:
kml_file.write(kml_doc.to_string())
kml_file.close()
def convert_single_file(filename, absolute=True, tessellate=True, last_only=False):
''' Convert a single sonde log file to a fastkml KML Folder object '''
# Read file.
_flight_data = read_telemetry_csv(filename)
# Extract the flight's serial number and launch time from the first line in the file.
_first_line = _flight_data[0][4]
_flight_serial = _first_line.split(',')[1] # Serial number is the second field in the line.
_launch_time = _flight_data[0][0].strftime("%Y%m%d-%H%M%SZ")
# Generate a comment line to use in the folder and placemark descriptions
_track_comment = "%s %s" % (_launch_time, _flight_serial)
_landing_comment = "%s Last Position" % (_flight_serial)
# Grab burst and last-seen positions
_burst_pos = flight_burst_position(_flight_data)
_landing_pos = _flight_data[-1]
# Generate the placemark & flight track.
_flight_geom = flight_path_to_geometry(_flight_data, name=_track_comment, absolute=absolute, tessellate=tessellate, extrude=tessellate)
_landing_geom = new_placemark(_landing_pos[1], _landing_pos[2], _landing_pos[3], name=_landing_comment, absolute=absolute)
_folder = fastkml.kml.Folder(ns, _flight_serial, _track_comment, 'Radiosonde Flight Path')
if last_only == False:
_folder.append(_flight_geom)
_folder.append(_landing_geom)
return _folder
parent_dir = dirname(dirname(abspath(__file__)))
sys.path.append(parent_dir)
from autorx.log_files import log_files_to_kml
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", type=str, default="../log/*_sonde.log",
help="Path to log file. May include wildcards, though the path must be wrapped in quotes. Default=../log/*_sonde.log")
parser.add_argument("-o", "--output", type=str, default="sondes.kml", help="KML output file name. Default=sondes.kml")
parser.add_argument('--clamp', action="store_false", default=True, help="Clamp tracks to ground instead of showing absolute altitudes.")
parser.add_argument('--noextrude', action="store_false", default=True, help="Disable Extrusions for absolute flight paths.")
parser.add_argument('--lastonly', action="store_true", default=False, help="Only plot last-seen sonde positions, not the flight paths.")
parser.add_argument("-i", "--input", type=str, default="../log/*_sonde.log",
help="Path to log file. May include wildcards, though the path "
"must be wrapped in quotes. Default=../log/*_sonde.log")
parser.add_argument("-o", "--output", type=str, default="sondes.kml",
help="KML output file name. Default=sondes.kml")
parser.add_argument('--clamp', action="store_false", default=True,
help="Clamp tracks to ground instead of showing absolute altitudes.")
parser.add_argument('--noextrude', action="store_false", default=True,
help="Disable Extrusions for absolute flight paths.")
parser.add_argument('--lastonly', action="store_true", default=False,
help="Only plot last-seen sonde positions, not the flight paths.")
args = parser.parse_args()
_file_list = glob.glob(args.input)
_file_list.sort(reverse=True)
_placemarks = []
for _file in _file_list:
print("Processing: %s" % _file)
try:
_placemarks.append(convert_single_file(_file, absolute=args.clamp, tessellate=args.noextrude, last_only=args.lastonly))
except:
print("Failed to process: %s" % _file)
write_kml(_placemarks, filename=args.output)
with open(args.output, "wb") as kml_file:
log_files_to_kml(_file_list, kml_file, absolute=args.clamp,
extrude=args.noextrude, last_only=args.lastonly)
print("Output saved to: %s" % args.output)

View file

@@ -54,6 +54,7 @@ int main(int argc,char *argv[]){
struct FSK *fsk;
struct MODEM_STATS stats;
int Fs,Rs,M,P,stats_ctr,stats_loop;
long sample_count = 0;
float loop_time;
int enable_stats = 0;
FILE *fin,*fout;
@@ -280,6 +281,7 @@ int main(int argc,char *argv[]){
for(i=0;i<fsk_nin(fsk);i++){
modbuf[i].real = ((float)rawbuf[i])/FDMDV_SCALE;
modbuf[i].imag = 0.0;
sample_count++;
}
}
else {
@@ -289,6 +291,7 @@ int main(int argc,char *argv[]){
for(i=0;i<fsk_nin(fsk);i++){
modbuf[i].real = ((float)rawbuf_u8[2*i]-127.0)/128.0;
modbuf[i].imag = ((float)rawbuf_u8[2*i+1]-127.0)/128.0;
sample_count++;
}
}
else {
@@ -296,6 +299,7 @@ int main(int argc,char *argv[]){
for(i=0;i<fsk_nin(fsk);i++){
modbuf[i].real = ((float)rawbuf[2*i])/FDMDV_SCALE;
modbuf[i].imag = ((float)rawbuf[2*i+1]/FDMDV_SCALE);
sample_count++;
}
}
}
@@ -356,7 +360,7 @@ int main(int argc,char *argv[]){
fprintf(stderr,"{");
time_t seconds = time(NULL);
fprintf(stderr,"\"secs\": %ld, \"EbNodB\": %5.1f, \"ppm\": %4d,",seconds, stats.snr_est, (int)fsk->ppm);
fprintf(stderr,"\"secs\": %ld, \"samples\": %ld, \"EbNodB\": %5.1f, \"ppm\": %4d,",seconds, sample_count, stats.snr_est, (int)fsk->ppm);
float *f_est;
if (fsk->freq_est_type)
f_est = fsk->f2_est;
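Each stats report is one JSON object per line on stderr, now carrying the cumulative "samples" counter alongside the existing fields. A minimal consumer sketch, assuming fsk_demod's stderr is piped into it; only the fields visible in the fprintf above are assumed:

# Print the new "samples" field from fsk_demod stats lines arriving on stdin.
import json
import sys

for line in sys.stdin:
    line = line.strip()
    if not line.startswith("{"):
        continue  # not a stats line
    try:
        stats = json.loads(line)
    except ValueError:
        continue  # skip malformed or truncated lines
    print(f"secs={stats['secs']} samples={stats['samples']} EbNodB={stats['EbNodB']}")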