Improve handling of expired spots, and the efficiency of expired-spot checks during web requests.

Ian Renton
2025-11-29 16:12:44 +00:00
parent 3da8c80ad6
commit 8a4f23ac72
6 changed files with 64 additions and 49 deletions

View File

@@ -29,12 +29,15 @@ class CleanupTimer:
# Perform cleanup and reschedule next timer
def cleanup(self):
try:
# Perform cleanup
# Perform cleanup via letting the data expire
self.spots.expire()
self.alerts.expire()
# Alerts can persist in the system for a while, so we want to explicitly clean up any alerts that have
# expired
# Explicitly clean up any spots and alerts that have expired
for id in list(self.spots.iterkeys()):
spot = self.spots[id]
if spot.expired():
self.spots.delete(id)
for id in list(self.alerts.iterkeys()):
alert = self.alerts[id]
if alert.expired():
self.alerts.delete(id)
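The list(...) around iterkeys() matters here: it snapshots the keys so delete() can run safely mid-sweep. A minimal sketch of the same pattern, with a plain dict standing in for the real stores (StubSpot and the 3600-second cutoff are illustrative, not from the codebase):

import time

class StubSpot:
    def __init__(self, t):
        self.time = t
    def expired(self, max_age=3600):
        # Same rule as Spot.expired() below: no time at all, or too old
        return not self.time or self.time < time.time() - max_age

spots = {"a": StubSpot(time.time()), "b": StubSpot(time.time() - 7200)}
for key in list(spots.keys()):  # snapshot first: dicts forbid deletion mid-iteration
    if spots[key].expired():
        del spots[key]
print(sorted(spots))  # ['a'] - the stale spot has been swept out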

View File

@@ -137,7 +137,7 @@ class Alert:
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)
# Decide if this alert has expired (in which case it should not be added to the system in the first place, and not
# returned by the web server if later requested, and removed by the cleanup functions. "Expired" is defined as
# returned by the web server if later requested, and should be removed by the cleanup functions). "Expired" is defined as
# either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours
# ago. If it somehow doesn't have a start_time either, it is considered to be expired.
def expired(self):
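The hunk cuts off before the body of expired(). A plausible body, sketched purely from the comment above (the three-hour window and the start_time/end_time epoch-seconds fields are assumptions drawn from that comment, not the real implementation):

from datetime import datetime, timedelta
import pytz

def alert_expired(start_time, end_time):
    # Hedged sketch of the rule described in the comment; field semantics are assumed
    now = datetime.now(pytz.UTC).timestamp()
    if end_time:
        return end_time < now  # an end_time in the past means expired
    if start_time:
        # Only a start_time: expired if it began more than 3 hours ago
        return start_time < now - timedelta(hours=3).total_seconds()
    return True  # no start_time at all: treat as expired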

View File

@@ -4,11 +4,12 @@ import json
import logging
import re
from dataclasses import dataclass
from datetime import datetime
from datetime import datetime, timedelta
import pytz
from pyhamtools.locator import locator_to_latlong, latlong_to_locator
from core.config import MAX_SPOT_AGE
from core.lookup_helper import lookup_helper
from core.sig_utils import get_icon_for_sig, get_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_for_sig
from data.sig_ref import SIGRef
@@ -403,3 +404,10 @@ class Spot:
if sig_ref.id == new_sig_ref.id and sig_ref.sig == new_sig_ref.sig:
return
self.sig_refs.append(new_sig_ref)
# Decide if this spot has expired (in which case it should not be added to the system in the first place, and not
# returned by the web server if later requested, and should be removed by the cleanup functions). "Expired" is defined
# as having a time longer ago than the server's MAX_SPOT_AGE. If it somehow doesn't have a time at all, it is
# considered to be expired.
def expired(self):
return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()
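A quick standalone check of the same epoch-seconds arithmetic (the MAX_SPOT_AGE value of 3600 is assumed for illustration; the real value comes from core.config):

from datetime import datetime, timedelta
import pytz

MAX_SPOT_AGE = 3600  # assumed value, in seconds

def spot_expired(spot_time):
    # Mirrors Spot.expired() above: missing time, or older than the cutoff
    cutoff = (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()
    return not spot_time or spot_time < cutoff

now = datetime.now(pytz.UTC).timestamp()
print(spot_expired(now))         # False: fresh spot
print(spot_expired(now - 7200))  # True: two hours old, past the cutoff
print(spot_expired(None))        # True: missing time counts as expired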

View File

@@ -422,7 +422,6 @@ class WebServer:
if a is not None:
alerts.append(a)
# We never want alerts that seem to be in the past
alerts = list(filter(lambda alert: not alert.expired(), alerts))
alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0))
for k in query.keys():
match k:
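For reference, the expiry filter and the sort above collapse into one expression if preferred (an equivalent sketch, not code from this commit):

alerts = sorted(
    (a for a in alerts if not a.expired()),             # drop expired alerts first
    key=lambda a: a.start_time if a.start_time else 0,  # earliest start_time first
)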

View File

@@ -42,43 +42,46 @@ class GMA(HTTPSpotProvider):
# GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
if "REF" in source_spot:
ref_response = SEMI_STATIC_URL_DATA_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
headers=HTTP_HEADERS)
# Sometimes this is blank, so handle that
if ref_response.text is not None and ref_response.text != "":
ref_info = ref_response.json()
# If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. POTA and WWFF
# spots come through with reftype=POTA or reftype=WWFF. SOTA is harder to figure out because both SOTA
# and GMA summits come through with reftype=Summit, so we must check for the presence of a "sota" entry
# to determine if it's a SOTA summit.
if "reftype" in ref_info and ref_info["reftype"] not in ["POTA", "WWFF"] and (
ref_info["reftype"] != "Summit" or ref_info["sota"] == ""):
match ref_info["reftype"]:
case "Summit":
spot.sig_refs[0].sig = "GMA"
spot.sig = "GMA"
case "IOTA Island":
spot.sig_refs[0].sig = "IOTA"
spot.sig = "IOTA"
case "Lighthouse (ILLW)":
spot.sig_refs[0].sig = "ILLW"
spot.sig = "ILLW"
case "Lighthouse (ARLHS)":
spot.sig_refs[0].sig = "ARLHS"
spot.sig = "ARLHS"
case "Castle":
spot.sig_refs[0].sig = "WCA"
spot.sig = "WCA"
case "Mill":
spot.sig_refs[0].sig = "MOTA"
spot.sig = "MOTA"
case _:
logging.warn("GMA spot found with ref type " + ref_info[
"reftype"] + ", developer needs to add support for this!")
spot.sig_refs[0].sig = ref_info["reftype"]
spot.sig = ref_info["reftype"]
try:
ref_response = SEMI_STATIC_URL_DATA_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
headers=HTTP_HEADERS)
# Sometimes this is blank, so handle that
if ref_response.text is not None and ref_response.text != "":
ref_info = ref_response.json()
# If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. POTA and WWFF
# spots come through with reftype=POTA or reftype=WWFF. SOTA is harder to figure out because both SOTA
# and GMA summits come through with reftype=Summit, so we must check for the presence of a "sota" entry
# to determine if it's a SOTA summit.
if "reftype" in ref_info and ref_info["reftype"] not in ["POTA", "WWFF"] and (
ref_info["reftype"] != "Summit" or ref_info["sota"] == ""):
match ref_info["reftype"]:
case "Summit":
spot.sig_refs[0].sig = "GMA"
spot.sig = "GMA"
case "IOTA Island":
spot.sig_refs[0].sig = "IOTA"
spot.sig = "IOTA"
case "Lighthouse (ILLW)":
spot.sig_refs[0].sig = "ILLW"
spot.sig = "ILLW"
case "Lighthouse (ARLHS)":
spot.sig_refs[0].sig = "ARLHS"
spot.sig = "ARLHS"
case "Castle":
spot.sig_refs[0].sig = "WCA"
spot.sig = "WCA"
case "Mill":
spot.sig_refs[0].sig = "MOTA"
spot.sig = "MOTA"
case _:
logging.warn("GMA spot found with ref type " + ref_info[
"reftype"] + ", developer needs to add support for this!")
spot.sig_refs[0].sig = ref_info["reftype"]
spot.sig = ref_info["reftype"]
# Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
# that for us.
new_spots.append(spot)
# Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
# that for us.
new_spots.append(spot)
except Exception:
logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot["REF"] + ", ignoring this spot for now")
return new_spots
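The new try block is deliberately broad. For comparison, a narrower sketch that catches only the failures an HTTP lookup is expected to raise — assuming the cache's get() surfaces requests-style errors and that .json() raises ValueError on a bad payload (both assumptions; the cache wrapper's behaviour isn't shown in this diff):

import logging
import requests

def lookup_ref_info(cache, url, headers):
    # Catch only expected lookup failures, so programming errors still surface
    try:
        response = cache.get(url, headers=headers)
        if response.text:
            return response.json()
    except requests.RequestException:
        logging.warning("Request failed for %s, ignoring this spot for now", url)
    except ValueError:  # requests' JSONDecodeError subclasses ValueError
        logging.warning("Bad JSON from %s, ignoring this spot for now", url)
    return None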

View File

@@ -34,8 +34,9 @@ class SpotProvider:
if datetime.fromtimestamp(spot.time, pytz.UTC) > self.last_spot_time:
# Fill in any blanks
spot.infer_missing()
# Add to the list
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
# Add to the list, provided it has not already expired.
if not spot.expired():
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC)
# Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
@@ -44,9 +45,10 @@ class SpotProvider:
def submit(self, spot):
# Fill in any blanks
spot.infer_missing()
# Add to the list
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC)
# Add to the list, provided it has not already expired.
if not spot.expired():
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC)
# Stop any threads and prepare for application shutdown
def stop(self):
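A small end-to-end sketch of the new guard in submit(): a spot that arrives already expired is simply never stored (StubSpot and the plain dict are stand-ins for the real Spot class and cache):

import time

class StubSpot:
    def __init__(self, id, t):
        self.id, self.time = id, t
    def expired(self, max_age=3600):
        return not self.time or self.time < time.time() - max_age

store = {}
for spot in (StubSpot("fresh", time.time()), StubSpot("stale", time.time() - 7200)):
    if not spot.expired():  # the new guard: drop spots that are already expired
        store[spot.id] = spot
print(list(store))  # ['fresh']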