Disk-based caching for spots so they survive a software restart

Ian Renton
2025-10-04 09:12:40 +01:00
parent bfcaf6e261
commit c785137258
5 changed files with 29 additions and 35 deletions
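For reference, a minimal sketch of the python-diskcache pattern this commit moves to: a Cache is an on-disk, SQLite-backed key/value store, so spots written to it outlive the process. The directory name matches the one added below; the key and value are placeholders, not the project's real Spot objects.

    from diskcache import Cache

    # Sketch only: the key and value here are placeholders, not the app's Spot objects.
    spots = Cache('.spots_cache')                   # SQLite-backed directory on disk
    spots.add("some-guid", {"call": "M0ABC"}, expire=3600)  # insert-if-absent, TTL in seconds
    spots.expire()                                  # drop any entries whose TTL has passed
    print(len(spots), spots.get("some-guid"))
    spots.close()                                   # flush and release the store on shutdown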

.gitignore (vendored, 2 lines changed)

@@ -1,7 +1,7 @@
 /.venv
 __pycache__
 *.pyc
-/.data_store
+/.spots_cache
 /.qrz_callsign_lookup_cache
 /sota_summit_data_cache.sqlite
 /gma_ref_info_cache.sqlite

core/cleanup.py

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime
 from threading import Timer
 from time import sleep
@@ -10,10 +10,9 @@ import pytz
 class CleanupTimer:
 
     # Constructor
-    def __init__(self, spot_list, cleanup_interval, max_spot_age):
-        self.spot_list = spot_list
+    def __init__(self, spots, cleanup_interval):
+        self.spots = spots
         self.cleanup_interval = cleanup_interval
-        self.max_spot_age = max_spot_age
         self.cleanup_timer = None
         self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Starting"
@@ -30,10 +29,7 @@ class CleanupTimer:
     def cleanup(self):
         try:
             # Perform cleanup
-            for spot in self.spot_list:
-                if not spot.time or spot.time < datetime.now(pytz.UTC) - timedelta(seconds=self.max_spot_age):
-                    self.spot_list.remove(spot)
+            self.spots.expire()
             self.status = "OK"
             self.last_cleanup_time = datetime.now(pytz.UTC)
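With every spot stored under expire=MAX_SPOT_AGE, the cleanup timer no longer compares timestamps itself; calling expire() on the cache evicts anything past its TTL. A standalone sketch of that behaviour, with illustrative values and a throwaway demo directory rather than the application's configuration:

    from time import sleep
    from diskcache import Cache

    cache = Cache('/tmp/expire_demo')        # throwaway demo directory
    cache.add("k1", "old spot", expire=1)    # one-second TTL so the demo expires quickly
    sleep(2)
    removed = cache.expire()                 # returns how many expired entries were culled
    print(removed, len(cache))               # expect: 1 0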

Provider base class

@@ -3,7 +3,7 @@ from datetime import datetime
 import pytz
 from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION
-from core.config import config, SERVER_OWNER_CALLSIGN
+from core.config import config, SERVER_OWNER_CALLSIGN, MAX_SPOT_AGE
 
 # Generic data provider class. Subclasses of this query the individual APIs for data.
@@ -19,11 +19,11 @@ class Provider:
         self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.last_spot_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Not Started" if self.enabled else "Disabled"
-        self.spot_list = None
+        self.spots = None
 
     # Set up the provider, e.g. giving it the spot list to work from
-    def setup(self, spot_list):
-        self.spot_list = spot_list
+    def setup(self, spots):
+        self.spots = spots
 
     # Start the provider. This should return immediately after spawning threads to access the remote resources
     def start(self):
@@ -38,8 +38,8 @@ class Provider:
             if spot.time > self.last_spot_time:
                 # Fill in any blanks
                 spot.infer_missing()
-                # Append to the list
-                self.spot_list.append(spot)
+                # Add to the list
+                self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE)
         self.last_spot_time = max(map(lambda s: s.time, spots))
 
     # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
@@ -48,8 +48,8 @@ class Provider:
     def submit(self, spot):
         # Fill in any blanks
         spot.infer_missing()
-        # Append to the list
-        self.spot_list.append(spot)
+        # Add to the list
+        self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE)
         self.last_spot_time = spot.time
 
     # Stop any threads and prepare for application shutdown
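A note on the new add() call: spots are keyed by spot.guid, and diskcache's add() only writes when the key is not already present (set() would overwrite), so a spot reported again by a provider presumably keeps its original entry and simply ages out after MAX_SPOT_AGE. Values are pickled to disk, so the Spot objects need to be picklable. A small sketch of the insert-if-absent behaviour, using placeholder keys and values:

    from diskcache import Cache

    cache = Cache('/tmp/add_demo')                                 # throwaway demo directory
    print(cache.add("guid-123", "first report", expire=3600))      # True: key was absent, value stored
    print(cache.add("guid-123", "duplicate report", expire=3600))  # False: key exists, value unchanged
    print(cache.get("guid-123"))                                   # -> 'first report'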

server/webserver.py

@@ -16,10 +16,10 @@ from core.utils import serialize_everything
 class WebServer:
 
     # Constructor
-    def __init__(self, spot_list, status_data, port):
+    def __init__(self, spots, status_data, port):
         self.last_page_access_time = None
         self.last_api_access_time = None
-        self.spot_list = spot_list
+        self.spots = spots
         self.status_data = status_data
         self.port = port
         self.thread = Thread(target=self.run)
@@ -90,7 +90,11 @@ class WebServer:
         # value or a comma-separated list.
         # We can provide a "limit" number as well. Spots are always returned newest-first; "limit" limits to only the
         # most recent X spots.
-        spots = sorted(self.spot_list, key=lambda spot: spot.time, reverse=True)
+        spot_guids = list(self.spots.iterkeys())
+        spots = []
+        for k in spot_guids:
+            spots.append(self.spots.get(k))
+        spots = sorted(spots, key=lambda spot: spot.time, reverse=True)
         for k in query.keys():
             match k:
                 case "since":

Main entry point script

@@ -9,24 +9,16 @@ from time import sleep
 import importlib
 import psutil
 import pytz
+from diskcache import Cache
 from core.cleanup import CleanupTimer
 from core.config import config, MAX_SPOT_AGE, WEB_SERVER_PORT, SERVER_OWNER_CALLSIGN
 from core.constants import SOFTWARE_VERSION
-from providers.aprsis import APRSIS
-from providers.dxcluster import DXCluster
-from providers.gma import GMA
-from providers.hema import HEMA
-from providers.parksnpeaks import ParksNPeaks
-from providers.pota import POTA
-from providers.rbn import RBN
-from providers.sota import SOTA
-from providers.wwbota import WWBOTA
-from providers.wwff import WWFF
+from core.utils import QRZ_CALLSIGN_DATA_CACHE
 from server.webserver import WebServer
 
 # Globals
-spot_list = []
+spots = Cache('.spots_cache')
 status_data = {}
 providers = []
 cleanup_timer = None
@@ -41,6 +33,8 @@ def shutdown(sig, frame):
         if p.enabled:
             p.stop()
     cleanup_timer.stop()
+    QRZ_CALLSIGN_DATA_CACHE.close()
+    spots.close()
 
 # Utility method to get a data provider based on the class specified in its config entry.
 def get_provider_from_config(config_providers_entry):
@@ -71,18 +65,18 @@ if __name__ == '__main__':
     for entry in config["providers"]:
         providers.append(get_provider_from_config(entry))
 
     # Set up data providers
-    for p in providers: p.setup(spot_list=spot_list)
+    for p in providers: p.setup(spots=spots)
     # Start data providers
     for p in providers:
         if p.enabled:
             p.start()
 
     # Set up timer to clear spot list of old data
-    cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=MAX_SPOT_AGE)
+    cleanup_timer = CleanupTimer(spots=spots, cleanup_interval=60)
     cleanup_timer.start()
 
     # Set up web server
-    web_server = WebServer(spot_list=spot_list, status_data=status_data, port=WEB_SERVER_PORT)
+    web_server = WebServer(spots=spots, status_data=status_data, port=WEB_SERVER_PORT)
     web_server.start()
     logging.info("Startup complete.")
@@ -92,7 +86,7 @@ if __name__ == '__main__':
         sleep(5)
         status_data["uptime"] = str(datetime.now(pytz.UTC) - startup_time).split(".")[0]
         status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3)
-        status_data["num_spots"] = len(spot_list)
+        status_data["num_spots"] = len(spots)
         status_data["providers"] = list(map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, "last_updated": p.last_update_time, "last_spot": p.last_spot_time}, providers))
         status_data["cleanup"] = {"status": cleanup_timer.status, "last_ran": cleanup_timer.last_cleanup_time}
         status_data["webserver"] = {"status": web_server.status, "last_api_access": web_server.last_api_access_time, "last_page_access": web_server.last_page_access_time}