diff --git a/.gitignore b/.gitignore
index 4caed11..6fe7c65 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,7 @@
 /.venv
 __pycache__
 *.pyc
-/.data_store
+/.spots_cache
 /.qrz_callsign_lookup_cache
 /sota_summit_data_cache.sqlite
 /gma_ref_info_cache.sqlite
diff --git a/core/cleanup.py b/core/cleanup.py
index 5319163..89a7ba8 100644
--- a/core/cleanup.py
+++ b/core/cleanup.py
@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime
 from threading import Timer
 from time import sleep
 
@@ -10,10 +10,9 @@ import pytz
 
 class CleanupTimer:
     # Constructor
-    def __init__(self, spot_list, cleanup_interval, max_spot_age):
-        self.spot_list = spot_list
+    def __init__(self, spots, cleanup_interval):
+        self.spots = spots
         self.cleanup_interval = cleanup_interval
-        self.max_spot_age = max_spot_age
         self.cleanup_timer = None
         self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Starting"
@@ -30,10 +29,7 @@ class CleanupTimer:
     def cleanup(self):
         try:
             # Perform cleanup
-            for spot in self.spot_list:
-                if not spot.time or spot.time < datetime.now(pytz.UTC) - timedelta(seconds=self.max_spot_age):
-                    self.spot_list.remove(spot)
-
+            self.spots.expire()
             self.status = "OK"
             self.last_cleanup_time = datetime.now(pytz.UTC)
 
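The `CleanupTimer` no longer needs `max_spot_age` because every spot is written to the `diskcache` store with its own `expire` time, and `Cache.expire()` deletes whatever has passed that deadline. A minimal standalone sketch of that behaviour (the scratch directory, key and value here are illustrative, not part of the patch):

```python
import time
from diskcache import Cache

cache = Cache("/tmp/spots_demo")   # illustrative scratch directory
cache.clear()

# Each entry carries its own time-to-live, as in spots.add(guid, spot, expire=MAX_SPOT_AGE)
cache.set("spot-1", {"call": "N0CALL"}, expire=0.5)

time.sleep(1)
print(cache.get("spot-1"))         # None - the entry is past its expiry
print(cache.expire())              # what cleanup() now calls; returns the number of rows removed
cache.close()
```

Expired entries stop being returned by `get()` as soon as their deadline passes; the periodic `expire()` call just reclaims the rows on disk.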
diff --git a/providers/provider.py b/providers/provider.py
index 13d8c7f..a71c630 100644
--- a/providers/provider.py
+++ b/providers/provider.py
@@ -3,7 +3,7 @@ from datetime import datetime
 import pytz
 
 from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION
-from core.config import config, SERVER_OWNER_CALLSIGN
+from core.config import config, SERVER_OWNER_CALLSIGN, MAX_SPOT_AGE
 
 
 # Generic data provider class. Subclasses of this query the individual APIs for data.
@@ -19,11 +19,11 @@ class Provider:
         self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.last_spot_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Not Started" if self.enabled else "Disabled"
-        self.spot_list = None
+        self.spots = None
 
     # Set up the provider, e.g. giving it the spot list to work from
-    def setup(self, spot_list):
-        self.spot_list = spot_list
+    def setup(self, spots):
+        self.spots = spots
 
     # Start the provider. This should return immediately after spawning threads to access the remote resources
     def start(self):
@@ -38,8 +38,8 @@
             if spot.time > self.last_spot_time:
                 # Fill in any blanks
                 spot.infer_missing()
-                # Append to the list
-                self.spot_list.append(spot)
+                # Add to the list
+                self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE)
         self.last_spot_time = max(map(lambda s: s.time, spots))
 
     # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
@@ -48,8 +48,8 @@ class Provider:
     def submit(self, spot):
         # Fill in any blanks
         spot.infer_missing()
-        # Append to the list
-        self.spot_list.append(spot)
+        # Add to the list
+        self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE)
         self.last_spot_time = spot.time
 
     # Stop any threads and prepare for application shutdown
diff --git a/server/webserver.py b/server/webserver.py
index bd4d9d7..be56de1 100644
--- a/server/webserver.py
+++ b/server/webserver.py
@@ -16,10 +16,10 @@ from core.utils import serialize_everything
 
 class WebServer:
     # Constructor
-    def __init__(self, spot_list, status_data, port):
+    def __init__(self, spots, status_data, port):
         self.last_page_access_time = None
         self.last_api_access_time = None
-        self.spot_list = spot_list
+        self.spots = spots
         self.status_data = status_data
         self.port = port
         self.thread = Thread(target=self.run)
@@ -90,7 +90,11 @@
         # value or a comma-separated list.
         # We can provide a "limit" number as well. Spots are always returned newest-first; "limit" limits to only the
         # most recent X spots.
-        spots = sorted(self.spot_list, key=lambda spot: spot.time, reverse=True)
+        spot_guids = list(self.spots.iterkeys())
+        spots = []
+        for k in spot_guids:
+            spots.append(self.spots.get(k))
+        spots = sorted(spots, key=lambda spot: spot.time, reverse=True)
         for k in query.keys():
             match k:
                 case "since":
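In the provider and web server hunks above, spots are keyed by `spot.guid`: `Cache.add()` only stores a value when the key is not already present (or has expired), so the same spot arriving twice is kept once, and the web server rebuilds a plain list by walking `iterkeys()` and calling `get()`. A small sketch of that write/read pattern (again with made-up keys and directory; the `None` filter at the end is a defensive extra, not something the patch itself adds):

```python
from diskcache import Cache

cache = Cache("/tmp/spots_demo")   # illustrative scratch directory
cache.clear()

# add() is a no-op when the key already exists, so re-submitting the same GUID
# (e.g. the same spot seen on a later poll) does not create a duplicate.
print(cache.add("guid-1", {"freq": 7.032}, expire=3600))   # True  - stored
print(cache.add("guid-1", {"freq": 7.032}, expire=3600))   # False - already present

# Read path, mirroring the web server hunk: iterate keys, fetch each value.
# A key can expire between iterkeys() and get(), in which case get() returns
# None, hence the filter before any sorting.
spots = [v for v in (cache.get(k) for k in cache.iterkeys()) if v is not None]
print(spots)
cache.close()
```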
diff --git a/spothole.py b/spothole.py
index 3b08150..c4584c6 100644
--- a/spothole.py
+++ b/spothole.py
@@ -9,24 +9,16 @@ from time import sleep
 import importlib
 import psutil
 import pytz
+from diskcache import Cache
 
 from core.cleanup import CleanupTimer
 from core.config import config, MAX_SPOT_AGE, WEB_SERVER_PORT, SERVER_OWNER_CALLSIGN
 from core.constants import SOFTWARE_VERSION
-from providers.aprsis import APRSIS
-from providers.dxcluster import DXCluster
-from providers.gma import GMA
-from providers.hema import HEMA
-from providers.parksnpeaks import ParksNPeaks
-from providers.pota import POTA
-from providers.rbn import RBN
-from providers.sota import SOTA
-from providers.wwbota import WWBOTA
-from providers.wwff import WWFF
+from core.utils import QRZ_CALLSIGN_DATA_CACHE
 from server.webserver import WebServer
 
 # Globals
-spot_list = []
+spots = Cache('.spots_cache')
 status_data = {}
 providers = []
 cleanup_timer = None
@@ -41,6 +33,8 @@ def shutdown(sig, frame):
         if p.enabled:
             p.stop()
     cleanup_timer.stop()
+    QRZ_CALLSIGN_DATA_CACHE.close()
+    spots.close()
 
 # Utility method to get a data provider based on the class specified in its config entry.
 def get_provider_from_config(config_providers_entry):
@@ -71,18 +65,18 @@ if __name__ == '__main__':
     for entry in config["providers"]: providers.append(get_provider_from_config(entry))
 
     # Set up data providers
-    for p in providers: p.setup(spot_list=spot_list)
+    for p in providers: p.setup(spots=spots)
 
     # Start data providers
     for p in providers:
         if p.enabled: p.start()
 
     # Set up timer to clear spot list of old data
-    cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=MAX_SPOT_AGE)
+    cleanup_timer = CleanupTimer(spots=spots, cleanup_interval=60)
     cleanup_timer.start()
 
     # Set up web server
-    web_server = WebServer(spot_list=spot_list, status_data=status_data, port=WEB_SERVER_PORT)
+    web_server = WebServer(spots=spots, status_data=status_data, port=WEB_SERVER_PORT)
     web_server.start()
 
     logging.info("Startup complete.")
@@ -92,7 +86,7 @@ if __name__ == '__main__':
         sleep(5)
         status_data["uptime"] = str(datetime.now(pytz.UTC) - startup_time).split(".")[0]
         status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3)
-        status_data["num_spots"] = len(spot_list)
+        status_data["num_spots"] = len(spots)
         status_data["providers"] = list(map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, "last_updated": p.last_update_time, "last_spot": p.last_spot_time}, providers))
         status_data["cleanup"] = {"status": cleanup_timer.status, "last_ran": cleanup_timer.last_cleanup_time}
         status_data["webserver"] = {"status": web_server.status, "last_api_access": web_server.last_api_access_time, "last_page_access": web_server.last_page_access_time}
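Because the store now lives on disk at `.spots_cache` (the directory added to `.gitignore`), unexpired spots survive a restart, `len(spots)` feeds the `num_spots` status field, and `close()` in `shutdown()` releases the underlying SQLite file cleanly. A short lifecycle sketch under those assumptions (the key and value are illustrative):

```python
from diskcache import Cache

spots = Cache(".spots_cache")            # SQLite-backed store on disk
spots.add("guid-1", {"call": "N0CALL"}, expire=3600)
print(len(spots))                        # the figure reported as num_spots
spots.close()                            # mirrors shutdown() in the patch

# Re-opening the same directory sees the same data: spots that have not yet
# expired survive a process restart, which the old in-memory list never did.
spots = Cache(".spots_cache")
print(spots.get("guid-1"))               # {'call': 'N0CALL'}
spots.close()
```

If you run this as-is, note that it creates and writes to a real `.spots_cache` directory in the current working directory.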