Improve expired spot handling, and make handling of expired spots during web requests more efficient.

Ian Renton
2025-11-29 16:12:44 +00:00
parent 3da8c80ad6
commit 8a4f23ac72
6 changed files with 64 additions and 49 deletions

View File

@@ -29,12 +29,15 @@ class CleanupTimer:
     # Perform cleanup and reschedule next timer
     def cleanup(self):
         try:
-            # Perform cleanup
+            # Perform cleanup via letting the data expire
             self.spots.expire()
             self.alerts.expire()
-            # Alerts can persist in the system for a while, so we want to explicitly clean up any alerts that have
-            # expired
+            # Explicitly clean up any spots and alerts that have expired
+            for id in list(self.spots.iterkeys()):
+                spot = self.spots[id]
+                if spot.expired():
+                    self.spots.delete(id)
             for id in list(self.alerts.iterkeys()):
                 alert = self.alerts[id]
                 if alert.expired():
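
The cleanup now works in two stages: expire() drops entries whose cache time-to-live has lapsed, and the explicit loops additionally drop entries whose own expired() check is true even though their TTL has not yet run out. A minimal sketch of that pattern, assuming the spot and alert stores behave like a diskcache.Cache (which provides expire(), iterkeys(), get() and delete()); the helper name below is illustrative, not part of the project:

def cleanup_store(store):
    # Stage 1: drop entries whose cache TTL has lapsed
    store.expire()
    # Stage 2: drop entries that report themselves as expired, even if their TTL has not run out
    for key in list(store.iterkeys()):
        item = store.get(key)
        if item is not None and item.expired():
            store.delete(key)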

View File

@@ -137,7 +137,7 @@ class Alert:
         return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)
 
     # Decide if this alert has expired (in which case it should not be added to the system in the first place, and not
-    # returned by the web server if later requested, and removed by the cleanup functions. "Expired" is defined as
+    # returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
     # either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours
     # ago. If it somehow doesn't have a start_time either, it is considered to be expired.
     def expired(self):
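
The body of Alert.expired() is cut off by the hunk above. Based on the rule described in the comment, it might look roughly like the sketch below, assuming start_time and end_time are stored as Unix timestamps (the real field types may differ):

from datetime import datetime, timedelta
import pytz

def expired(self):
    now = datetime.now(pytz.UTC).timestamp()
    if self.end_time:
        # Expired once the end time has passed
        return self.end_time < now
    if self.start_time:
        # Only a start time: treat as expired three hours after the start
        return self.start_time < now - timedelta(hours=3).total_seconds()
    # Neither time is set, so consider it expired
    return True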

View File

@@ -4,11 +4,12 @@ import json
 import logging
 import re
 from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timedelta
 
 import pytz
 from pyhamtools.locator import locator_to_latlong, latlong_to_locator
 
+from core.config import MAX_SPOT_AGE
 from core.lookup_helper import lookup_helper
 from core.sig_utils import get_icon_for_sig, get_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_for_sig
 from data.sig_ref import SIGRef
@@ -403,3 +404,10 @@ class Spot:
             if sig_ref.id == new_sig_ref.id and sig_ref.sig == new_sig_ref.sig:
                 return
         self.sig_refs.append(new_sig_ref)
+
+    # Decide if this spot has expired (in which case it should not be added to the system in the first place, and not
+    # returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
+    # having a time longer ago than the server's MAX_SPOT_AGE permits. If it somehow doesn't have a time at all, it is
+    # considered to be expired.
+    def expired(self):
+        return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()
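
With the one-line check above, a spot is kept only while its own timestamp is newer than now minus MAX_SPOT_AGE. For example, assuming a MAX_SPOT_AGE of 3600 seconds (an illustrative value, not the project's configured one), a spot timestamped two hours ago reports itself as expired:

from datetime import datetime, timedelta
import pytz

MAX_SPOT_AGE = 3600  # assumed value for illustration only
spot_time = (datetime.now(pytz.UTC) - timedelta(hours=2)).timestamp()
expired = spot_time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()
print(expired)  # True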

View File

@@ -422,7 +422,6 @@ class WebServer:
                 if a is not None:
                     alerts.append(a)
             # We never want alerts that seem to be in the past
-            alerts = list(filter(lambda alert: not alert.expired(), alerts))
             alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0))
             for k in query.keys():
                 match k:

View File

@@ -42,43 +42,46 @@ class GMA(HTTPSpotProvider):
 
             # GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
             if "REF" in source_spot:
-                ref_response = SEMI_STATIC_URL_DATA_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
-                                                              headers=HTTP_HEADERS)
-                # Sometimes this is blank, so handle that
-                if ref_response.text is not None and ref_response.text != "":
-                    ref_info = ref_response.json()
-                    # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. POTA and WWFF
-                    # spots come through with reftype=POTA or reftype=WWFF. SOTA is harder to figure out because both SOTA
-                    # and GMA summits come through with reftype=Summit, so we must check for the presence of a "sota" entry
-                    # to determine if it's a SOTA summit.
-                    if "reftype" in ref_info and ref_info["reftype"] not in ["POTA", "WWFF"] and (
-                            ref_info["reftype"] != "Summit" or ref_info["sota"] == ""):
-                        match ref_info["reftype"]:
-                            case "Summit":
-                                spot.sig_refs[0].sig = "GMA"
-                                spot.sig = "GMA"
-                            case "IOTA Island":
-                                spot.sig_refs[0].sig = "IOTA"
-                                spot.sig = "IOTA"
-                            case "Lighthouse (ILLW)":
-                                spot.sig_refs[0].sig = "ILLW"
-                                spot.sig = "ILLW"
-                            case "Lighthouse (ARLHS)":
-                                spot.sig_refs[0].sig = "ARLHS"
-                                spot.sig = "ARLHS"
-                            case "Castle":
-                                spot.sig_refs[0].sig = "WCA"
-                                spot.sig = "WCA"
-                            case "Mill":
-                                spot.sig_refs[0].sig = "MOTA"
-                                spot.sig = "MOTA"
-                            case _:
-                                logging.warn("GMA spot found with ref type " + ref_info[
-                                    "reftype"] + ", developer needs to add support for this!")
-                                spot.sig_refs[0].sig = ref_info["reftype"]
-                                spot.sig = ref_info["reftype"]
-                # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
-                # that for us.
-                new_spots.append(spot)
+                try:
+                    ref_response = SEMI_STATIC_URL_DATA_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
+                                                                  headers=HTTP_HEADERS)
+                    # Sometimes this is blank, so handle that
+                    if ref_response.text is not None and ref_response.text != "":
+                        ref_info = ref_response.json()
+                        # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. POTA and WWFF
+                        # spots come through with reftype=POTA or reftype=WWFF. SOTA is harder to figure out because both SOTA
+                        # and GMA summits come through with reftype=Summit, so we must check for the presence of a "sota" entry
+                        # to determine if it's a SOTA summit.
+                        if "reftype" in ref_info and ref_info["reftype"] not in ["POTA", "WWFF"] and (
+                                ref_info["reftype"] != "Summit" or ref_info["sota"] == ""):
+                            match ref_info["reftype"]:
+                                case "Summit":
+                                    spot.sig_refs[0].sig = "GMA"
+                                    spot.sig = "GMA"
+                                case "IOTA Island":
+                                    spot.sig_refs[0].sig = "IOTA"
+                                    spot.sig = "IOTA"
+                                case "Lighthouse (ILLW)":
+                                    spot.sig_refs[0].sig = "ILLW"
+                                    spot.sig = "ILLW"
+                                case "Lighthouse (ARLHS)":
+                                    spot.sig_refs[0].sig = "ARLHS"
+                                    spot.sig = "ARLHS"
+                                case "Castle":
+                                    spot.sig_refs[0].sig = "WCA"
+                                    spot.sig = "WCA"
+                                case "Mill":
+                                    spot.sig_refs[0].sig = "MOTA"
+                                    spot.sig = "MOTA"
+                                case _:
+                                    logging.warn("GMA spot found with ref type " + ref_info[
+                                        "reftype"] + ", developer needs to add support for this!")
+                                    spot.sig_refs[0].sig = ref_info["reftype"]
+                                    spot.sig = ref_info["reftype"]
+                    # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+                    # that for us.
+                    new_spots.append(spot)
+                except:
+                    logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot["REF"] + ", ignoring this spot for now")
 
         return new_spots
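
The reftype handling above maps each GMA reference type onto a SIG identifier. The same mapping could be kept as a lookup table, which only needs one new entry per future reference type; a sketch of that alternative (behaviour as in the match statement, names illustrative):

# GMA reference types and the SIG identifiers they map to, taken from the cases above
REFTYPE_TO_SIG = {
    "Summit": "GMA",
    "IOTA Island": "IOTA",
    "Lighthouse (ILLW)": "ILLW",
    "Lighthouse (ARLHS)": "ARLHS",
    "Castle": "WCA",
    "Mill": "MOTA",
}

def sig_for_reftype(reftype):
    # Unknown types fall back to the raw reftype string, as the default case does above
    return REFTYPE_TO_SIG.get(reftype, reftype)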

View File

@@ -34,8 +34,9 @@ class SpotProvider:
             if datetime.fromtimestamp(spot.time, pytz.UTC) > self.last_spot_time:
                 # Fill in any blanks
                 spot.infer_missing()
-                # Add to the list
-                self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
+                # Add to the list, provided it has not already expired.
+                if not spot.expired():
+                    self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
         self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC)
 
     # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
@@ -44,9 +45,10 @@ class SpotProvider:
     def submit(self, spot):
         # Fill in any blanks
         spot.infer_missing()
-        # Add to the list
-        self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
+        # Add to the list, provided it has not already expired.
+        if not spot.expired():
+            self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
+            self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC)
 
     # Stop any threads and prepare for application shutdown
     def stop(self):
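
The new spot.expired() guard matters because the store's expire argument is a time-to-live measured from the moment of insertion, not from the spot's own timestamp; without the guard, a spot that was already older than MAX_SPOT_AGE would still sit in the store for a further MAX_SPOT_AGE seconds. A small illustration, assuming the store is a diskcache.Cache (an assumption here; the key and value below are placeholders):

from diskcache import Cache

MAX_SPOT_AGE = 3600  # assumed value for illustration only
cache = Cache()  # temporary on-disk cache just for this example

# Stands in for a spot whose own timestamp is long in the past
stale_spot = {"time": 0}

cache.add("stale-id", stale_spot, expire=MAX_SPOT_AGE)
# The TTL clock started at insertion, so the stale entry is still present
print("stale-id" in cache)  # True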