Implemented old spot cleanup thread

Ian Renton
2025-09-27 12:09:16 +01:00
parent b1346e26ea
commit efa4c402df
8 changed files with 73 additions and 13 deletions

core/cleanup.py (new file)

@@ -0,0 +1,46 @@
# Provides a timed cleanup of the spot list.
import logging
from datetime import datetime, timedelta
from threading import Timer
from time import sleep

import pytz


class CleanupTimer:
    # Constructor
    def __init__(self, spot_list, cleanup_interval, max_spot_age):
        self.spot_list = spot_list
        self.cleanup_interval = cleanup_interval
        self.max_spot_age = max_spot_age
        self.cleanup_timer = None
        self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC)
        self.status = "Starting"

    # Start the cleanup timer
    def start(self):
        self.cleanup()

    # Stop any threads and prepare for application shutdown
    def stop(self):
        self.cleanup_timer.cancel()

    # Perform cleanup and reschedule next timer
    def cleanup(self):
        try:
            # Perform cleanup. Iterate over a copy of the list so that removing
            # items doesn't skip entries mid-iteration.
            for spot in list(self.spot_list):
                if not spot.time or spot.time < datetime.now(pytz.UTC) - timedelta(seconds=self.max_spot_age):
                    self.spot_list.remove(spot)
            self.status = "OK"
            self.last_cleanup_time = datetime.now(pytz.UTC)
        except Exception as e:
            self.status = "Error"
            logging.exception("Exception in Cleanup thread")
            sleep(1)
        self.cleanup_timer = Timer(self.cleanup_interval, self.cleanup)
        self.cleanup_timer.start()
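
A minimal usage sketch of the new class; the StubSpot type below is a hypothetical stand-in for the real Spot model, used only to keep the example self-contained (main.py later in this commit shows the real wiring):

# Usage sketch only; StubSpot is a made-up stand-in for the real Spot class.
from datetime import datetime, timedelta
import pytz
from core.cleanup import CleanupTimer

class StubSpot:
    def __init__(self, age_seconds):
        self.time = datetime.now(pytz.UTC) - timedelta(seconds=age_seconds)

spot_list = [StubSpot(10), StubSpot(7200)]  # one fresh spot, one two-hour-old spot
cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=3600)
cleanup_timer.start()   # cleans immediately, then reschedules itself every 60 s
print(len(spot_list))   # 1 -- the stale spot has been dropped
cleanup_timer.stop()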


@@ -3,7 +3,10 @@ from data.band import Band
# General software
SOFTWARE_NAME = "Metaspot by M0TRT"
SOFTWARE_VERSION = "0.1"
+# Todo make configurable
+SERVER_OWNER_CALLSIGN = "M0TRT"
+MAX_SPOT_AGE_SEC = 3600
# Modes
CW_MODES = ["CW"]
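
For reference, the new age limit feeds the cutoff comparison used by the cleanup thread added above:

from datetime import datetime, timedelta
import pytz

MAX_SPOT_AGE_SEC = 3600
cutoff = datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE_SEC)  # spots older than this are dropped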


@@ -22,7 +22,8 @@ def infer_mode_family_from_mode(mode):
    elif mode.upper() in DATA_MODES:
        return "DATA"
    else:
-       print("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
+       if mode.upper() != "OTHER":
+           print("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
        return None

# Infer a band from a frequency in kHz
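
Taken together with the earlier branches not shown in this hunk, the helper presumably reads roughly as follows; the CW branch and the import location of DATA_MODES are assumptions based on core/constants.py, and any phone-mode branch is omitted because its constant isn't visible in this commit:

# Sketch of the surrounding function with the new guard applied; branches other
# than the visible DATA/else ones are assumptions.
from core.constants import CW_MODES, DATA_MODES

def infer_mode_family_from_mode(mode):
    if mode.upper() in CW_MODES:
        return "CW"
    elif mode.upper() in DATA_MODES:
        return "DATA"
    else:
        # Modes reported as "OTHER" are now skipped silently rather than logged.
        if mode.upper() != "OTHER":
            print("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
        return None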

main.py

@@ -1,6 +1,8 @@
# Main script
import signal
+from core.cleanup import CleanupTimer
+from core.constants import MAX_SPOT_AGE_SEC
from providers.dxcluster import DXCluster
from providers.gma import GMA
from providers.hema import HEMA
@@ -10,15 +12,16 @@ from providers.sota import SOTA
from providers.wwbota import WWBOTA
from providers.wwff import WWFF

# Shutdown function
def shutdown(sig, frame):
    print("Stopping program, this may take a few seconds...")
    for p in providers: p.stop()
+   cleanup_timer.stop()

# Main function
if __name__ == '__main__':
    print("Starting...")
    # Shut down gracefully on SIGINT
    signal.signal(signal.SIGINT, shutdown)
@@ -41,7 +44,9 @@ if __name__ == '__main__':
    # Start data providers
    for p in providers: p.start()
-   # todo thread to clear spot list of old data
+   # Set up timer to clear spot list of old data
+   cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=MAX_SPOT_AGE_SEC)
+   cleanup_timer.start()
    # Todo serve spot API
    # Todo spot API arguments e.g. "since" based on received_time of spots, sources, sigs, dx cont, dxcc, de cont, band, mode, filter out qrt, filter pre-qsy
@@ -49,6 +54,8 @@ if __name__ == '__main__':
    # Todo serve apidocs
    # Todo serve website
    print("Startup complete.")
+   # TODO NOTES FOR NGINX REVERSE PROXY
+   # local cache time of 15 sec to avoid over burdening python?
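
The "since" argument from the TODO above might eventually look something like this sketch; nothing of the sort exists in this commit, and the received_time attribute is only named in the TODO text:

# Hypothetical sketch only -- the spot API filter is still a TODO in this commit.
def spots_since(spot_list, since):
    # Return spots received after the given timezone-aware datetime.
    return [s for s in spot_list if s.received_time and s.received_time > since]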


@@ -36,12 +36,15 @@ class GMA(HTTPProvider):
                        sig_refs=[source_spot["REF"]],
                        sig_refs_names=[source_spot["NAME"]],
                        time=datetime.strptime(source_spot["DATE"] + source_spot["TIME"], "%Y%m%d%H%M").replace(tzinfo=pytz.UTC),
-                       latitude=float(source_spot["LAT"]),
-                       longitude=float(source_spot["LON"]))
+                       latitude=float(source_spot["LAT"]) if (source_spot["LAT"] != "") else None,  # Seen GMA spots with no lat/lon
+                       longitude=float(source_spot["LON"]) if (source_spot["LON"] != "") else None)

            # GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
-           ref_info = self.REF_INFO_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"], headers=self.HTTP_HEADERS).json()
-           spot.sig = ref_info["reftype"]
+           ref_response = self.REF_INFO_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"], headers=self.HTTP_HEADERS)
+           # Sometimes this is blank, so handle that
+           if ref_response.text is not None and ref_response.text != "":
+               ref_info = ref_response.json()
+               spot.sig = ref_info["reftype"]

            # Fill in any missing data
            spot.infer_missing()
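
The same blank-field pattern recurs in the ParksNPeaks change below; a tiny helper along these lines (illustrative only, not part of the commit) would capture both cases:

# Illustrative helper only; the commit inlines this logic as conditional expressions.
def parse_optional_float(value):
    # GMA and PNP sometimes return "" where a number is expected.
    return float(value) if value not in (None, "") else None

# e.g. latitude=parse_optional_float(source_spot["LAT"])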


@@ -1,8 +1,9 @@
import logging
-from datetime import datetime, timezone
+from datetime import datetime
from threading import Timer, Thread
from time import sleep
+import pytz
import requests
from providers.provider import Provider
@@ -42,7 +43,7 @@ class HTTPProvider(Provider):
            self.submit(new_spots)
            self.status = "OK"
-           self.last_update_time = datetime.now(timezone.utc)
+           self.last_update_time = datetime.now(pytz.UTC)
        except Exception as e:
            self.status = "Error"
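
Both expressions produce timezone-aware UTC datetimes, so the switch appears to be purely for consistency with the pytz usage elsewhere in the codebase; a quick check:

from datetime import datetime, timezone
import pytz

a = datetime.now(timezone.utc)
b = datetime.now(pytz.UTC)
print(a.utcoffset(), b.utcoffset())  # both 0:00:00 -- the two compare and subtract cleanly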


@@ -21,13 +21,12 @@ class ParksNPeaks(HTTPProvider):
        new_spots = []
        # Iterate through source data
        for source_spot in http_response.json():
-           print(source_spot)
            # Convert to our spot format
            spot = Spot(source=self.name(),
                        source_id=source_spot["actID"],
                        dx_call=source_spot["actCallsign"].upper(),
                        de_call=source_spot["actSpoter"].upper(),  # typo exists in API
-                       freq=float(source_spot["actFreq"]) * 1000,
+                       freq=float(source_spot["actFreq"]) * 1000 if (source_spot["actFreq"] != "") else None,  # Seen PNP spots with empty frequency!
                        mode=source_spot["actMode"].upper(),
                        comment=source_spot["actComments"],
                        sig=source_spot["actClass"],
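
A quick check of the guarded conversion above (sample values invented; the multiplication by 1000 suggests actFreq arrives in MHz while spots store kHz, though that unit is an inference rather than anything stated in the commit):

for act_freq in ("14.285", ""):
    freq = float(act_freq) * 1000 if (act_freq != "") else None
    print(freq)  # 14285.0, then None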


@@ -2,14 +2,14 @@ from datetime import datetime
import pytz
-from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION
+from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION, SERVER_OWNER_CALLSIGN

# Generic data provider class. Subclasses of this query the individual APIs for data.
class Provider:
    # HTTP headers used for providers that use HTTP
-   HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION }
+   HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" }

    # Constructor
    def __init__(self):
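
With the constants added earlier in this commit, the User-Agent header every provider sends works out as:

SOFTWARE_NAME = "Metaspot by M0TRT"
SOFTWARE_VERSION = "0.1"
SERVER_OWNER_CALLSIGN = "M0TRT"

HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" }
print(HTTP_HEADERS["User-Agent"])  # Metaspot by M0TRT 0.1 (operated by M0TRT)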