PEP8 violations

pull/78/head
Konstantin Gründger 2020-11-22 08:55:19 +01:00
parent b9f8e14edf
commit b263b00f7c
27 changed files with 166 additions and 175 deletions

View file

@@ -26,23 +26,25 @@ def create_app(config_name='default'):
configuration = configs[config_name]
app.config.from_object(configuration)
app.config.from_envvar("OGN_CONFIG_MODULE", silent=True)
# Initialize other things
bootstrap.init_app(app)
db.init_app(app)
migrate.init_app(app, db)
cache.init_app(app)
redis_client.init_app(app)
init_celery(app)
register_blueprints(app)
return app
def register_blueprints(app):
from app.main import bp as bp_main
app.register_blueprint(bp_main)
def init_celery(app=None):
app = app or create_app(os.getenv('FLASK_CONFIG') or 'default')
celery.conf.broker_url = app.config['BROKER_URL']
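For orientation, the factory shown in this hunk is normally driven from a small entry point; a minimal, hypothetical usage sketch (the FLASK_CONFIG variable and the `from app import create_app` import are taken from other hunks in this commit, everything else is illustrative):

import os
from app import create_app

# create_app() already wires the extensions, Celery and the blueprints (see the hunk above),
# so an entry point only has to pick a configuration by name.
app = create_app(os.getenv('FLASK_CONFIG') or 'default')

if __name__ == '__main__':
    app.run()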

View file

@@ -6,6 +6,7 @@ NOTHING = ""
CONTEST_RELEVANT = "AND agl < 1000"
LOW_PASS = "AND agl < 50 and ground_speed > 250"
def compute_flights(date, flight_type=0):
if flight_type == 0:
filter = NOTHING
@@ -66,6 +67,7 @@ def compute_flights(date, flight_type=0):
db.session.execute(query)
db.session.commit()
def compute_gaps(date):
date_str = date.strftime("%Y-%m-%d")
@@ -105,9 +107,10 @@ def compute_gaps(date):
db.session.execute(query)
db.session.commit()
if __name__ == '__main__':
from app import create_app
app = create_app()
with app.app_context():
result = compute_flights(date=date(2020, 10, 28))
print(result)

View file

@@ -4,8 +4,10 @@ from flask import current_app
from app import redis_client
from app.gateway.message_handling import sender_position_csv_strings_to_db, receiver_position_csv_strings_to_db, receiver_status_csv_strings_to_db
def transfer_from_redis_to_database():
-unmapping = lambda s: s[0].decode('utf-8')
+def unmapping(string):
+return string[0].decode('utf-8')
receiver_status_data = list(map(unmapping, redis_client.zpopmin('receiver_status', 100000)))
receiver_position_data = list(map(unmapping, redis_client.zpopmin('receiver_position', 100000)))
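For background, the lambda-to-def change above is what flake8 E731 ("do not assign a lambda expression, use a def") asks for: a named function behaves identically but carries its own name in tracebacks and can be annotated. A standalone sketch with made-up sample data (redis-py's zpopmin returns (member, score) pairs):

def unmapping(item):
    # item is a (member, score) tuple from ZPOPMIN; only the member bytes are needed
    return item[0].decode('utf-8')

sample = [(b'receiver_status_1', 1.0), (b'receiver_status_2', 2.0)]
print(list(map(unmapping, sample)))  # ['receiver_status_1', 'receiver_status_2']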

View file

@@ -46,7 +46,7 @@ def update_takeoff_landings(start, end):
.filter(db.between(SenderPosition.reference_timestamp, start - timedelta(seconds=MAX_EVENT_DURATION), end + timedelta(seconds=MAX_EVENT_DURATION)))
.subquery()
)
# make a query with current, previous and next position
sq2 = db.session.query(
sq.c.name,
@@ -75,11 +75,11 @@ def update_takeoff_landings(start, end):
# consider only positions between start and end and with predecessor and successor and limit distance and duration between points
sq3 = (
db.session.query(sq2)
.filter(db.and_(sq2.c.name_prev != db.null(), sq2.c.name_next != db.null()))
.filter(db.and_(db.func.ST_DistanceSphere(sq2.c.location, sq2.c.location_wkt_prev) < MAX_EVENT_RADIUS, db.func.ST_DistanceSphere(sq2.c.location, sq2.c.location_wkt_next) < MAX_EVENT_RADIUS))
.filter(sq2.c.timestamp_next - sq2.c.timestamp_prev < timedelta(seconds=MAX_EVENT_DURATION))
.filter(db.between(sq2.c.timestamp, start, end))
.subquery()
)
# find possible takeoffs and landings
@@ -131,8 +131,8 @@ def update_takeoff_landings(start, end):
# ... add the country
takeoff_landing_query = (
db.session.query(sq6.c.timestamp, sq6.c.track, sq6.c.is_takeoff, sq6.c.sender_id, sq6.c.airport_id, Country.gid)
-.join(Country, sq6.c.country_code==Country.iso2, isouter=True)
+.join(Country, sq6.c.country_code == Country.iso2, isouter=True)
.subquery()
)
# ... and save them
@@ -156,7 +156,7 @@ def update_logbook(offset_days=None):
# limit time range to given date and set window partition and window order
if offset_days:
-(start, end) = date_to_timestamps(datetime.utcnow()-timedelta(days=offset_days))
+(start, end) = date_to_timestamps(datetime.utcnow() - timedelta(days=offset_days))
else:
(start, end) = date_to_timestamps(datetime.utcnow().date())
pa = TakeoffLanding.sender_id
@@ -181,7 +181,6 @@ def update_logbook(offset_days=None):
db.func.lag(TakeoffLanding.airport_id).over(partition_by=pa, order_by=wo).label("airport_id_prev"),
db.func.lead(TakeoffLanding.airport_id).over(partition_by=pa, order_by=wo).label("airport_id_next")
)
-#.filter(between(TakeoffLanding.timestamp, start, end))
.subquery()
)
@@ -195,7 +194,7 @@ def update_logbook(offset_days=None):
)
.filter(sq.c.is_takeoff == db.true())
.filter(db.or_(sq.c.is_takeoff_next == db.true(), sq.c.is_takeoff_next == db.null()))
-.filter(~Logbook.query.filter(db.and_(Logbook.sender_id==sq.c.sender_id, Logbook.takeoff_timestamp==sq.c.timestamp, Logbook.takeoff_airport_id==sq.c.airport_id)).exists())
+.filter(~Logbook.query.filter(db.and_(Logbook.sender_id == sq.c.sender_id, Logbook.takeoff_timestamp == sq.c.timestamp, Logbook.takeoff_airport_id == sq.c.airport_id)).exists())
)
ins = insert(Logbook).from_select(
(
@@ -220,7 +219,7 @@ def update_logbook(offset_days=None):
)
.filter(db.or_(sq.c.is_takeoff_prev == db.false(), sq.c.is_takeoff_prev == db.null()))
.filter(sq.c.is_takeoff == db.false())
-.filter(~Logbook.query.filter(db.and_(Logbook.sender_id==sq.c.sender_id, Logbook.landing_timestamp==sq.c.timestamp, Logbook.landing_airport_id==sq.c.airport_id)).exists())
+.filter(~Logbook.query.filter(db.and_(Logbook.sender_id == sq.c.sender_id, Logbook.landing_timestamp == sq.c.timestamp, Logbook.landing_airport_id == sq.c.airport_id)).exists())
)
ins = insert(Logbook).from_select(
(
@@ -253,9 +252,9 @@ def update_logbook(offset_days=None):
# insert (new) flights
new_flights_query = (
-db.session.query(complete_flight_query) \
+db.session.query(complete_flight_query)
-.filter(~Logbook.query.filter(db.and_(Logbook.sender_id==complete_flight_query.c.sender_id, Logbook.landing_timestamp==complete_flight_query.c.landing_timestamp, Logbook.landing_airport_id==complete_flight_query.c.landing_airport_id)).exists())
+.filter(~Logbook.query.filter(db.and_(Logbook.sender_id == complete_flight_query.c.sender_id, Logbook.landing_timestamp == complete_flight_query.c.landing_timestamp, Logbook.landing_airport_id == complete_flight_query.c.landing_airport_id)).exists())
-.filter(~Logbook.query.filter(db.and_(Logbook.sender_id==complete_flight_query.c.sender_id, Logbook.takeoff_timestamp==complete_flight_query.c.takeoff_timestamp, Logbook.takeoff_airport_id==complete_flight_query.c.takeoff_airport_id)).exists())
+.filter(~Logbook.query.filter(db.and_(Logbook.sender_id == complete_flight_query.c.sender_id, Logbook.takeoff_timestamp == complete_flight_query.c.takeoff_timestamp, Logbook.takeoff_airport_id == complete_flight_query.c.takeoff_airport_id)).exists())
)
ins = insert(Logbook).from_select(
(
@@ -276,17 +275,16 @@ def update_logbook(offset_days=None):
# update existing landing with takeoff from complete flight
upd = db.update(Logbook) \
.where(db.and_(
-Logbook.sender_id==complete_flight_query.c.sender_id,
+Logbook.sender_id == complete_flight_query.c.sender_id,
-Logbook.takeoff_timestamp==db.null(),
+Logbook.takeoff_timestamp == db.null(),
-Logbook.takeoff_airport_id==db.null(),
+Logbook.takeoff_airport_id == db.null(),
-Logbook.landing_timestamp!=db.null(),
+Logbook.landing_timestamp != db.null(),
-Logbook.landing_timestamp==complete_flight_query.c.landing_timestamp,
+Logbook.landing_timestamp == complete_flight_query.c.landing_timestamp,
-Logbook.landing_airport_id==complete_flight_query.c.landing_airport_id
+Logbook.landing_airport_id == complete_flight_query.c.landing_airport_id
)) \
.values(takeoff_timestamp=complete_flight_query.c.takeoff_timestamp,
takeoff_track=complete_flight_query.c.takeoff_track,
-takeoff_airport_id=complete_flight_query.c.takeoff_airport_id
-)
+takeoff_airport_id=complete_flight_query.c.takeoff_airport_id)
result = db.session.execute(upd)
current_app.logger.debug(f"Updated {result.rowcount} takeoffs to complete flights")
db.session.commit()
@@ -294,17 +292,16 @@ def update_logbook(offset_days=None):
# update existing takeoff with landing from complete flight
upd = db.update(Logbook) \
.where(db.and_(
-Logbook.sender_id==complete_flight_query.c.sender_id,
+Logbook.sender_id == complete_flight_query.c.sender_id,
-Logbook.takeoff_timestamp!=db.null(),
+Logbook.takeoff_timestamp != db.null(),
-Logbook.takeoff_timestamp==complete_flight_query.c.takeoff_timestamp,
+Logbook.takeoff_timestamp == complete_flight_query.c.takeoff_timestamp,
-Logbook.takeoff_airport_id==complete_flight_query.c.takeoff_airport_id,
+Logbook.takeoff_airport_id == complete_flight_query.c.takeoff_airport_id,
-Logbook.landing_timestamp==db.null(),
+Logbook.landing_timestamp == db.null(),
-Logbook.landing_airport_id==db.null()
+Logbook.landing_airport_id == db.null()
)) \
.values(landing_timestamp=complete_flight_query.c.landing_timestamp,
landing_track=complete_flight_query.c.landing_track,
-landing_airport_id=complete_flight_query.c.landing_airport_id
-)
+landing_airport_id=complete_flight_query.c.landing_airport_id)
result = db.session.execute(upd)
current_app.logger.debug(f"Updated {result.rowcount} landings to complete flights")
db.session.commit()
@@ -312,11 +309,10 @@ def update_logbook(offset_days=None):
return
def update_max_altitudes():
MAX_UPDATES = 60
-query = f"""
+query = """
UPDATE logbooks
SET max_altitude = sq2.max_altitude
FROM (
@@ -347,6 +343,7 @@ def update_max_altitudes():
return update_counter
def update_max_altitudes_orm():
"""Add max altitudes in logbook when flight is complete (takeoff and landing)."""
@@ -354,17 +351,17 @@ def update_max_altitudes_orm():
logbook_entries = (
db.session.query(Logbook.id, Sender.name)
.filter(db.and_(Logbook.takeoff_timestamp != db.null(), Logbook.landing_timestamp != db.null(), Logbook.max_altitude == db.null()))
.filter(Logbook.sender_id == Sender.id)
.limit(1)
.subquery()
)
max_altitudes = (
db.session.query(logbook_entries.c.id, db.func.max(SenderPosition.altitude).label("max_altitude"))
.filter(db.and_(db.between_(SenderPosition.timestamp >= Logbook.takeoff_timestamp, SenderPosition.timestamp <= Logbook.landing_timestamp), SenderPosition.name == logbook_entries.c.name))
.group_by(Logbook.id)
.subquery()
)
update_logbooks = db.session.query(Logbook).filter(Logbook.id == max_altitudes.c.id).update({Logbook.max_altitude: max_altitudes.c.max_altitude}, synchronize_session="fetch")
@@ -374,11 +371,12 @@ def update_max_altitudes_orm():
finish_message = "Logbook (altitude): {} entries updated.".format(update_logbooks)
return finish_message
if __name__ == '__main__':
from app import create_app
app = create_app()
with app.app_context():
-#result = update_takeoff_landings(start=datetime(2020, 11, 9, 10, 0, 0), end=datetime(2020, 11, 9, 15, 30, 0))
+result = update_takeoff_landings(start=datetime(2020, 11, 9, 10, 0, 0), end=datetime(2020, 11, 9, 15, 30, 0))
-#result = update_logbook()
+result = update_logbook()
result = update_max_altitudes_orm()
print(result)

View file

@@ -3,8 +3,9 @@ from app.utils import get_sql_trustworthy
SQL_TRUSTWORTHY = get_sql_trustworthy(source_table_alias='sp')
def create_views():
-db.session.execute(f"""
+db.session.execute("""
DROP VIEW IF EXISTS receiver_ranking CASCADE;
CREATE VIEW receiver_ranking AS
@@ -23,7 +24,7 @@ def create_views():
ORDER BY max_distance DESC;
""")
-db.session.execute(f"""
+db.session.execute("""
DROP VIEW IF EXISTS sender_ranking CASCADE;
CREATE VIEW sender_ranking AS
@@ -44,6 +45,7 @@ def create_views():
db.session.commit()
def create_timescaledb_views():
# 1. Since the reference_timestamps are strictly increasing we can set
# the parameter 'refresh_lag' to a very short time so the materialization
@@ -51,11 +53,11 @@ def create_timescaledb_views():
# 2. The feature realtime aggregation from TimescaleDB is quite time consuming.
# So we set materialized_only=true
-### Sender statistics
+# --- Sender statistics ---
# These stats will be used in the daily ranking, so we make the bucket < 1d
db.session.execute(f"""
DROP VIEW IF EXISTS sender_stats_1h CASCADE;
CREATE VIEW sender_stats_1h
WITH (timescaledb.continuous, timescaledb.materialized_only=true, timescaledb.refresh_lag='5 minutes') AS
SELECT
@@ -90,7 +92,7 @@ def create_timescaledb_views():
GROUP BY bucket, sp.name, is_trustworthy;
""")
-### Receiver statistics
+# --- Receiver statistics ---
# These stats will be used in the daily ranking, so we make the bucket < 1d
db.session.execute(f"""
DROP VIEW IF EXISTS receiver_stats_1h CASCADE;
@@ -128,8 +130,8 @@ def create_timescaledb_views():
FROM sender_positions AS sp
GROUP BY bucket, sp.receiver_name, is_trustworthy;
""")
-### Relation statistics (sender <-> receiver)
+# --- Relation statistics (sender <-> receiver) ---
# these stats will be used on a >= 1d basis, so we make the bucket = 1d
db.session.execute(f"""
DROP VIEW IF EXISTS relation_stats_1d CASCADE;
@@ -162,4 +164,4 @@ class MyView(db.Model):
autoload=True,
autoload_with=db.engine
)
"""

View file

@@ -119,6 +119,7 @@ def import_airports(path="tests/SeeYou.cup"):
db.session.commit()
print("Imported {} airports.".format(len(airports)))
@user_cli.command("create_timescaledb_views")
def cmd_create_timescaledb_views():
"""Create TimescaleDB views."""
@@ -126,11 +127,10 @@ def cmd_create_timescaledb_views():
create_timescaledb_views()
print("Done")
@user_cli.command("create_views")
def cmd_create_views():
"""Create views."""
create_views()
print("Done")

View file

@@ -15,6 +15,7 @@ from app import db
user_cli = AppGroup("export")
user_cli.help = "Export data in several file formats."
@user_cli.command("debug_sql")
@click.argument("start")
@click.argument("end")
@@ -24,7 +25,7 @@ def debug_sql(start, end, name):
# First: get all the positions (and the receiver names for later)
sql_sender_positions = f"""
SELECT reference_timestamp, name, receiver_name, timestamp, location, track, ground_speed, altitude, aircraft_type, climb_rate, turn_rate, distance, bearing, agl
FROM sender_positions
WHERE reference_timestamp BETWEEN '{start}' AND '{end}' AND name = '{name}'
ORDER BY reference_timestamp;
@@ -38,7 +39,7 @@ def debug_sql(start, end, name):
receiver_names.append("'" + row[2] + "'")
row = [f"'{r}'" if r else "DEFAULT" for r in row]
sender_position_values.append(f"({','.join(row)})")
# Second: get the receivers
sql_receivers = f"""
SELECT name, location
@@ -50,8 +51,8 @@ def debug_sql(start, end, name):
results = db.session.execute(sql_receivers)
for row in results:
row = [f"'{r}'" if r else "DEFAULT" for r in row]
receiver_values.append(f"({','.join(row)})")
# Third: get the airports
sql_airports = f"""
SELECT DISTINCT a.name, a.location, a.altitude, a.style, a.border
@@ -66,23 +67,22 @@ def debug_sql(start, end, name):
results = db.session.execute(sql_airports)
for row in results:
row = [f"'{r}'" if r else "DEFAULT" for r in row]
airport_values.append(f"({','.join(row)})")
# Last: write all into file
with open(f'{start}_{end}_{name}.sql', 'w') as file:
-file.write(f'/*\n')
+file.write('/*\n')
-file.write(f'OGN Python SQL Export\n')
+file.write('OGN Python SQL Export\n')
file.write(f'Created by: {os.getlogin()}\n')
file.write(f'Created at: {datetime.datetime.utcnow()}\n')
-file.write(f'*/\n\n')
+file.write('*/\n\n')
file.write("INSERT INTO airports(name, location, altitude, style, border) VALUES\n")
file.write(',\n'.join(airport_values) + ';\n\n')
file.write("INSERT INTO receivers(name, location) VALUES\n")
file.write(',\n'.join(receiver_values) + ';\n\n')
file.write("INSERT INTO sender_positions(reference_timestamp, name, receiver_name, timestamp, location, track, ground_speed, altitude, aircraft_type, climb_rate, turn_rate, distance, bearing, agl) VALUES\n")
file.write(',\n'.join(sender_position_values) + ';\n\n')
@@ -139,7 +139,7 @@ def igc(address, date):
return
try:
-sender = db.session.query(Sender).filter(Sender.address==address).one()
+sender = db.session.query(Sender).filter(Sender.address == address).one()
except NoResultFound as e:
print(f"No data for '{address}' in the DB")
return
@@ -173,9 +173,9 @@ def igc(address, date):
points = (
db.session.query(SenderPosition)
.filter(db.between(SenderPosition.reference_timestamp, f"{date} 00:00:00", f"{date} 23:59:59"))
.filter(SenderPosition.name == sender.name)
.order_by(SenderPosition.timestamp)
)
for point in points:

View file

@@ -84,7 +84,7 @@ def transfer():
"""Transfer data from redis to the database."""
transfer_from_redis_to_database()
@user_cli.command("printout")
@click.option("--aprs_filter", default='')
@@ -101,36 +101,3 @@ def printout(aprs_filter):
current_app.logger.warning("\nStop ogn gateway")
client.disconnect()
-@user_cli.command("convert")
-@click.argument("path")
-def file_import(path):
-"""Convert APRS logfiles into csv files for fast bulk import."""
-for (root, dirs, files) in os.walk(path):
-for file in sorted(files):
-print(file)
-convert(os.path.join(root, file))
-@user_cli.command("calculate")
-@click.argument("path")
-def file_calculate(path):
-"""Import csv files, calculate geographic features (distance, radial, agl, ...) and make data distinct."""
-file_tuples = []
-for (root, dirs, files) in os.walk(path):
-for file in sorted(files):
-if file.startswith('aircraft_beacons') and file.endswith('.csv.gz'):
-ab_filename = os.path.join(root, file)
-rb_filename = os.path.join(root, 'receiver' + file[8:])
-target_filename = os.path.join(root, file + '2')
-if os.path.isfile(target_filename):
-print("Outputfile {} already exists. Skipping".format(target_filename))
-else:
-file_tuples.append((ab_filename, rb_filename, target_filename))
-pbar = tqdm(file_tuples)
-for file_tuple in pbar:
-pbar.set_description("Converting {}".format(file_tuple[0]))
-calculate(file_tuple[0], file_tuple[1], file_tuple[2])

View file

@@ -40,7 +40,6 @@ def aprs_string_to_message(aprs_string):
bearing = int(message['bearing'])
message['bearing'] = bearing if bearing < 360 else 0
if "aircraft_type" in message:
message["aircraft_type"] = AircraftType(message["aircraft_type"]) if message["aircraft_type"] in AircraftType.list() else AircraftType.UNKNOWN

View file

@@ -18,11 +18,11 @@ SENDER_POSITION_BEACON_FIELDS = [
"receiver_name",
"timestamp",
"location",
"track",
"ground_speed",
"altitude",
"address_type",
"aircraft_type",
"stealth",
@@ -38,7 +38,7 @@ SENDER_POSITION_BEACON_FIELDS = [
"hardware_version",
"real_address",
"signal_power",
"distance",
"bearing",
"normalized_quality",
@@ -66,7 +66,7 @@ RECEIVER_POSITION_BEACON_FIELDS = [
RECEIVER_STATUS_BEACON_FIELDS = [
"reference_timestamp",
"name",
"dstcall",
"receiver_name",
@@ -90,7 +90,7 @@ def sender_position_message_to_csv_string(message, none_character=''):
csv_string = "{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30}\n".format(
message['reference_timestamp'],
message['name'],
message['dstcall'],
message['relay'] if 'relay' in message and message['relay'] else none_character,
@@ -101,8 +101,8 @@ def sender_position_message_to_csv_string(message, none_character=''):
message['track'] if 'track' in message and message['track'] else none_character,
message['ground_speed'] if 'ground_speed' in message and message['ground_speed'] else none_character,
int(message['altitude']) if message['altitude'] else none_character,
-message['address_type'] if 'address_type' in message and message['address_type'] else none_character, #10
+message['address_type'] if 'address_type' in message and message['address_type'] else none_character, # 10
message['aircraft_type'].name if 'aircraft_type' in message and message['aircraft_type'] else AircraftType.UNKNOWN.name,
message['stealth'] if 'stealth' in message and message['stealth'] else none_character,
message['address'] if 'address' in message and message['address'] else none_character,
@@ -112,12 +112,12 @@ def sender_position_message_to_csv_string(message, none_character=''):
message['error_count'] if 'error_count' in message and message['error_count'] else none_character,
message['frequency_offset'] if 'frequency_offset' in message and message['frequency_offset'] else none_character,
message['gps_quality_horizontal'] if 'gps_quality_horizontal' in message and message['gps_quality_horizontal'] else none_character,
-message['gps_quality_vertical'] if 'gps_quality_vertical' in message and message['gps_quality_vertical'] else none_character, #20
+message['gps_quality_vertical'] if 'gps_quality_vertical' in message and message['gps_quality_vertical'] else none_character, # 20
message['software_version'] if 'software_version' in message and message['software_version'] else none_character,
message['hardware_version'] if 'hardware_version' in message and message['hardware_version'] else none_character,
message['real_address'] if 'real_address' in message and message['real_address'] else none_character,
message['signal_power'] if 'signal_power' in message and message['signal_power'] else none_character,
message['distance'] if 'distance' in message and message['distance'] else none_character,
message['bearing'] if 'bearing' in message and message['bearing'] else none_character,
message['normalized_quality'] if 'normalized_quality' in message and message['normalized_quality'] else none_character,
@@ -132,7 +132,7 @@ def sender_position_message_to_csv_string(message, none_character=''):
def receiver_position_message_to_csv_string(message, none_character=''):
csv_string = "{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}\n".format(
message['reference_timestamp'],
message['name'],
message['dstcall'],
message['receiver_name'],
@@ -180,7 +180,7 @@ def sender_position_csv_strings_to_db(lines):
cursor.execute(f"CREATE TEMPORARY TABLE {tmp_tablename} (LIKE sender_positions) ON COMMIT DROP;")
cursor.copy_from(file=string_buffer, table=tmp_tablename, sep=",", columns=SENDER_POSITION_BEACON_FIELDS)
# Update agl
cursor.execute(f"""
UPDATE {tmp_tablename} AS tmp
@@ -238,7 +238,7 @@ def sender_position_csv_strings_to_db(lines):
""")
# Update sender_infos FK -> senders
-cursor.execute(f"""
+cursor.execute("""
UPDATE sender_infos AS si
SET sender_id = s.id
FROM senders AS s
@@ -316,7 +316,7 @@ def receiver_position_csv_strings_to_db(lines):
tmp.name,
tmp.timestamp,
tmp.location,
tmp.altitude,
tmp.agl
@@ -340,18 +340,18 @@ def receiver_position_csv_strings_to_db(lines):
""")
# Update receiver country
-cursor.execute(f"""
+cursor.execute("""
UPDATE receivers AS r
SET
country_id = c.gid
FROM countries AS c
WHERE r.country_id IS NULL AND ST_Within(r.location, c.geom);
""")
# Update receiver airport
-cursor.execute(f"""
+cursor.execute("""
UPDATE receivers AS r
SET
airport_id = (
SELECT id
FROM airports AS a
@@ -400,7 +400,7 @@ def receiver_status_csv_strings_to_db(lines):
tmp.name,
tmp.timestamp,
tmp.version,
tmp.platform,

View file

@@ -37,6 +37,7 @@ class Timer(object):
print("[{}]".format(self.name))
print("Elapsed: {}".format(time.time() - self.tstart))
def export_to_path(path):
connection = db.engine.raw_connection()
cursor = connection.cursor()

View file

@@ -3,4 +3,4 @@ from flask import Blueprint
bp = Blueprint("main", __name__)
import app.main.routes
import app.main.jinja_filters

View file

@@ -6,6 +6,7 @@ import time
import datetime
import math
@bp.app_template_filter()
def timestamp_to_status(timestamp):
if datetime.datetime.utcnow() - timestamp < datetime.timedelta(minutes=10):
@@ -15,6 +16,7 @@ def timestamp_to_status(timestamp):
else:
return '<b>OFFLINE</b>'
@bp.app_template_filter()
def to_html_link(obj):
if isinstance(obj, Airport):
@@ -40,6 +42,7 @@ def to_html_link(obj):
else:
raise NotImplementedError("cant apply filter 'to_html_link' to object {type(obj)}")
@bp.app_template_filter()
def to_ordinal(rad):
deg = math.degrees(rad)
@@ -58,4 +61,4 @@ def to_ordinal(rad):
elif deg >= 247.5 and deg < 292.5:
return "E"
elif deg >= 292.5 and deg < 337.5:
return "NE"

View file

@@ -1,10 +1,11 @@
from app import db
-from app.model import *
+from app.model import SenderDirectionStatistic
import random
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
def create_range_figure2(sender_id):
fig = Figure()
axis = fig.add_subplot(1, 1, 1)
@@ -14,6 +15,7 @@ def create_range_figure2(sender_id):
return fig
def create_range_figure(sender_id):
sds = db.session.query(SenderDirectionStatistic) \
.filter(SenderDirectionStatistic.sender_id == sender_id) \
@@ -24,11 +26,11 @@ def create_range_figure(sender_id):
fig = Figure()
direction_data = sds.direction_data
-max_range = max([r['max_range']/1000.0 for r in direction_data])
+max_range = max([r['max_range'] / 1000.0 for r in direction_data])
-theta = np.array([i['direction']/180*np.pi for i in direction_data])
+theta = np.array([i['direction'] / 180 * np.pi for i in direction_data])
-radii = np.array([i['max_range']/1000 if i['max_range'] > 0 else 0 for i in direction_data])
+radii = np.array([i['max_range'] / 1000 if i['max_range'] > 0 else 0 for i in direction_data])
-width = np.array([13/180*np.pi for i in direction_data])
+width = np.array([13 / 180 * np.pi for i in direction_data])
colors = plt.cm.viridis(radii / max_range)
ax = fig.add_subplot(111, projection='polar')
@@ -38,5 +40,5 @@ def create_range_figure(sender_id):
ax.set_theta_direction(-1)
fig.suptitle(f"Range between sender '{sds.sender.name}' and receiver '{sds.receiver.name}'")
return fig

View file

@@ -25,7 +25,7 @@ def get_used_countries():
@cache.memoize()
def get_used_airports_by_country(sel_country):
-query = db.session.query(Airport).filter(Airport.country_code == sel_country).filter(TakeoffLanding.airport_id==Airport.id).filter(TakeoffLanding.country_id == Country.gid).order_by(Airport.name).distinct(Airport.name)
+query = db.session.query(Airport).filter(Airport.country_code == sel_country).filter(TakeoffLanding.airport_id == Airport.id).filter(TakeoffLanding.country_id == Country.gid).order_by(Airport.name).distinct(Airport.name)
return [used_airport for used_airport in query]
@@ -45,17 +45,18 @@ def get_dates_for_airport(sel_airport):
@bp.route("/")
@bp.route("/index.html")
def index():
today_beginning = datetime.combine(date.today(), time())
-senders_today = db.session.query(db.func.count(Sender.id)).filter(Sender.lastseen>=today_beginning).one()[0]
+senders_today = db.session.query(db.func.count(Sender.id)).filter(Sender.lastseen >= today_beginning).one()[0]
-receivers_today = db.session.query(db.func.count(Receiver.id)).filter(Receiver.lastseen>=today_beginning).one()[0]
+receivers_today = db.session.query(db.func.count(Receiver.id)).filter(Receiver.lastseen >= today_beginning).one()[0]
-takeoffs_today = db.session.query(db.func.count(TakeoffLanding.id)).filter(db.and_(TakeoffLanding.timestamp>=today_beginning, TakeoffLanding.is_takeoff==True)).one()[0]
+takeoffs_today = db.session.query(db.func.count(TakeoffLanding.id)).filter(db.and_(TakeoffLanding.timestamp >= today_beginning, TakeoffLanding.is_takeoff is True)).one()[0]
-landings_today = db.session.query(db.func.count(TakeoffLanding.id)).filter(db.and_(TakeoffLanding.timestamp>=today_beginning, TakeoffLanding.is_takeoff==False)).one()[0]
+landings_today = db.session.query(db.func.count(TakeoffLanding.id)).filter(db.and_(TakeoffLanding.timestamp >= today_beginning, TakeoffLanding.is_takeoff is False)).one()[0]
-sender_positions_today = db.session.query(db.func.sum(ReceiverStatistic.messages_count)).filter(ReceiverStatistic.date==date.today()).one()[0]
+sender_positions_today = db.session.query(db.func.sum(ReceiverStatistic.messages_count)).filter(ReceiverStatistic.date == date.today()).one()[0]
sender_positions_total = db.session.query(db.func.sum(ReceiverStatistic.messages_count)).one()[0]
last_logbook_entries = db.session.query(Logbook).order_by(Logbook.reference_timestamp.desc()).limit(10)
-return render_template("index.html",
+return render_template(
+"index.html",
senders_today=senders_today,
receivers_today=receivers_today,
takeoffs_today=takeoffs_today,
@@ -80,13 +81,14 @@ def sender_detail():
return render_template("sender_detail.html", title="Sender", sender=sender)
@bp.route("/range_view.png")
def range_view():
import io
from flask import Response
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
sender_id = request.args.get("sender_id")
fig = create_range_figure(sender_id)
@@ -210,24 +212,28 @@ def download_flight():
return send_file(buffer, as_attachment=True, attachment_filename="wtf.igc", mimetype="text/plain")
@bp.route("/sender_ranking.html")
def sender_ranking():
sender_statistics = db.session.query(SenderStatistic) \
-.filter(db.and_(SenderStatistic.date==date.today(), SenderStatistic.is_trustworthy==True)) \
+.filter(db.and_(SenderStatistic.date == date.today(), SenderStatistic.is_trustworthy is True)) \
.order_by(SenderStatistic.max_distance.desc()) \
.all()
-return render_template("sender_ranking.html",
+return render_template(
+"sender_ranking.html",
title="Sender Ranking",
ranking=sender_statistics)
@bp.route("/receiver_ranking.html")
def receiver_ranking():
receiver_statistics = db.session.query(ReceiverStatistic) \
-.filter(db.and_(ReceiverStatistic.date==date.today(), ReceiverStatistic.is_trustworthy==True)) \
+.filter(db.and_(ReceiverStatistic.date == date.today(), ReceiverStatistic.is_trustworthy is True)) \
.order_by(ReceiverStatistic.max_distance.desc()) \
.all()
-return render_template("receiver_ranking.html",
+return render_template(
+"receiver_ranking.html",
title="Receiver Ranking",
ranking=receiver_statistics)
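One caveat worth spelling out next to this hunk: in a SQLAlchemy filter, column == True builds a SQL expression, whereas column is True is plain Python identity and evaluates to False for a Column object, so the condition degenerates to a constant false. A short sketch of the spellings that satisfy flake8 E712 without changing the query (names reused from the snippet above, so treat it as illustrative):

takeoffs_today = (
    db.session.query(db.func.count(TakeoffLanding.id))
    .filter(TakeoffLanding.timestamp >= today_beginning)
    .filter(TakeoffLanding.is_takeoff.is_(True))  # or simply: .filter(TakeoffLanding.is_takeoff)
    .one()[0]
)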

View file

@@ -37,7 +37,7 @@ class Logbook(db.Model):
@duration.expression
def duration(cls):
return db.case({False: None, True: cls.landing_timestamp - cls.takeoff_timestamp}, cls.landing_timestamp != db.null() and cls.takeoff_timestamp != db.null())
@hybrid_property
def reference_timestamp(self):
return self.takeoff_timestamp if self.takeoff_timestamp is not None else self.landing_timestamp
@@ -50,7 +50,7 @@ class Logbook(db.Model):
# FIXME: does not work...
# FIXME: this does not throw an error as the __table_args__ above, but there is no index created
#_wrapped_case = f"({db.case(whens={True: Logbook.takeoff_timestamp, False: Logbook.landing_timestamp}, value=Logbook.takeoff_timestamp != db.null())})"
#Index("idx_logbook_reference_timestamp", _wrapped_case)
# TODO:

View file

@@ -47,11 +47,10 @@ class Receiver(db.Model):
def airports_nearby(self):
query = (
db.session.query(Airport, db.func.st_distance_sphere(self.location_wkt, Airport.location_wkt), db.func.st_azimuth(self.location_wkt, Airport.location_wkt))
.filter(db.func.st_contains(db.func.st_buffer(Airport.location_wkt, 1), self.location_wkt))
-.filter(Airport.style.in_((2,4,5)))
+.filter(Airport.style.in_((2, 4, 5)))
.order_by(db.func.st_distance_sphere(self.location_wkt, Airport.location_wkt).asc())
.limit(5)
)
-airports = [(airport,distance,azimuth) for airport, distance, azimuth in query]
+airports = [(airport, distance, azimuth) for airport, distance, azimuth in query]
return airports

View file

@@ -2,6 +2,7 @@ from app import db
from sqlalchemy.dialects.postgresql import JSON
class SenderDirectionStatistic(db.Model):
__tablename__ = "sender_direction_statistics"

View file

@@ -4,6 +4,7 @@ from .aircraft_type import AircraftType
#from sqlalchemy.dialects.postgresql import ENUM
class SenderInfo(db.Model):
__tablename__ = "sender_infos"

View file

@@ -48,7 +48,7 @@ class SenderPosition(db.Model):
hardware_version = db.Column(db.SmallInteger)
real_address = db.Column(db.String(6))
signal_power = db.Column(db.Float(precision=2))
#proximity = None
# Calculated values (from parser)
@@ -60,4 +60,3 @@ class SenderPosition(db.Model):
location_mgrs = db.Column(db.String(15)) # full mgrs (15 chars)
location_mgrs_short = db.Column(db.String(9)) # reduced mgrs (9 chars), e.g. used for melissas range tool
agl = db.Column(db.Float(precision=2))

View file

@@ -4,6 +4,7 @@ from .aircraft_type import AircraftType
from sqlalchemy.dialects.postgresql import ENUM
class SenderPositionStatistic(db.Model):
__tablename__ = "sender_position_statistics"

View file

@@ -9,13 +9,14 @@ from app.collect.gateway import transfer_from_redis_to_database
from app import db, celery
@celery.task(name="transfer_to_database")
def transfer_to_database():
"""Transfer APRS data from Redis to database."""
result = transfer_from_redis_to_database()
return result
@celery.task(name="update_takeoff_landings")
def update_takeoff_landings(last_minutes):

View file

@@ -10,7 +10,7 @@ def update_statistics(date_str=None):
if date_str is None:
date_str = datetime.utcnow().strftime("%Y-%m-%d")
# Update relation statistics
db.session.execute(f"""
DELETE FROM relation_statistics
WHERE date = '{date_str}';
@@ -22,7 +22,7 @@ def update_statistics(date_str=None):
tmp.receiver_id,
is_trustworthy,
MAX(tmp.max_distance) AS max_distance,
MAX(tmp.max_normalized_quality) AS max_normalized_quality,
SUM(tmp.messages_count) AS messages_count,
@@ -43,7 +43,7 @@ def update_statistics(date_str=None):
tmp.sender_id,
is_trustworthy,
MAX(tmp.max_distance) AS max_distance,
MAX(tmp.max_normalized_quality) AS max_normalized_quality,
SUM(tmp.messages_count) AS messages_count,
@@ -65,7 +65,7 @@ def update_statistics(date_str=None):
tmp.receiver_id,
is_trustworthy,
MAX(tmp.max_distance) AS max_distance,
MAX(tmp.max_normalized_quality) AS max_normalized_quality,
SUM(tmp.messages_count) AS messages_count,
@@ -84,7 +84,7 @@ def update_sender_direction_statistics():
""" Update sender_direction_statistics."""
db.session.execute("""
DELETE FROM sender_direction_statistics;
INSERT INTO sender_direction_statistics(sender_id, receiver_id, directions_count, messages_count, direction_data)
SELECT
@@ -93,7 +93,7 @@ def update_sender_direction_statistics():
COUNT(sq2.*) AS directions_count,
SUM(sq2.messages_count) AS messages_count,
json_agg(json_build_object('direction', direction, 'messages_count', messages_count, 'max_range', max_range)) AS direction_data
FROM (
SELECT
sq.sender_id,
sq.receiver_id,

View file

@@ -137,8 +137,9 @@ def open_file(filename):
f = open(filename, "rt", encoding="latin-1")
return f
def get_sql_trustworthy(source_table_alias):
MIN_DISTANCE = 1000
MAX_DISTANCE = 640000
MAX_NORMALIZED_QUALITY = 40 # this is enough for > 640km
MAX_ERROR_COUNT = 5
@@ -149,4 +150,4 @@ def get_sql_trustworthy(source_table_alias):
AND ({source_table_alias}.normalized_quality IS NOT NULL AND {source_table_alias}.normalized_quality < {MAX_NORMALIZED_QUALITY})
AND ({source_table_alias}.error_count IS NULL OR {source_table_alias}.error_count < {MAX_ERROR_COUNT})
AND ({source_table_alias}.climb_rate IS NULL OR {source_table_alias}.climb_rate BETWEEN -{MAX_CLIMB_RATE} AND {MAX_CLIMB_RATE})
"""

View file

@@ -1,25 +1,27 @@
import os
class BaseConfig:
SECRET_KEY = "i-like-ogn"
# Flask-Cache stuff
CACHE_TYPE = "simple"
CACHE_DEFAULT_TIMEOUT = 300
# Redis stuff
REDIS_URL = "redis://localhost:6379/0"
# Celery stuff
BROKER_URL = os.environ.get("BROKER_URL", REDIS_URL)
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", REDIS_URL)
APRS_USER = "OGNPYTHON"
class DefaultConfig(BaseConfig):
SQLALCHEMY_DATABASE_URI = os.environ.get("SQLALCHEMY_DATABASE_URI", "postgresql://postgres:postgres@localhost:5432/ogn")
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Celery beat stuff
from celery.schedules import crontab
from datetime import timedelta
@@ -33,17 +35,19 @@ class DefaultConfig(BaseConfig):
"update_ddb_daily": {"task": "import_ddb", "schedule": timedelta(days=1)},
#"update_logbook_max_altitude": {"task": "update_logbook_max_altitude", "schedule": timedelta(minutes=1), "kwargs": {"offset_days": 0}},
#"purge_old_data": {"task": "purge_old_data", "schedule": timedelta(hours=1), "kwargs": {"max_hours": 48}},
}
class DevelopmentConfig(BaseConfig):
SQLALCHEMY_DATABASE_URI = "postgresql://postgres:postgres@localhost:5432/ogn_test"
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False
configs = {
'default': DefaultConfig,
'development': DevelopmentConfig,
'testing': DevelopmentConfig
}

View file

@@ -1,2 +1,2 @@
[flake8]
-ignore = F401, F841, E402, E501, E126
+ignore = F401, F841, E402, E501, E126, E265
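For reference, the codes listed here are pycodestyle/pyflakes rules: F401 unused import, F841 unused local variable, E402 module-level import not at top of file, E501 line too long, E126 continuation-line over-indentation, and the newly added E265, "block comment should start with '# '". A tiny Python sketch of what E265 matches (illustrative only):

#commented-out code without a space after '#'   <- reported as E265
# a normal block comment                        <- accepted
x = 1  # inline comments are governed by E261/E262 instead
print(x)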

View file

@@ -76,7 +76,6 @@ class TestLogbook(TestBaseDB):
self.assertEqual(entries[0].takeoff_airport_id, self.koenigsdorf.id)
self.assertEqual(entries[0].landing_airport_id, self.koenigsdorf.id)
@unittest.skip('needs information about airport timezone')
def test_takeoff_and_landing_on_different_days(self):
db.session.add(self.takeoff_koenigsdorf_dd0815)