Single common URL cache for semi-static lookups #74

Ian Renton
2025-11-02 14:22:15 +00:00
parent 0e8c7873d8
commit 28010a68ae
9 changed files with 37 additions and 50 deletions
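
The shared cache itself lives in core/cache_utils.py, which is among the nine changed files but is not shown in the hunks below. A minimal sketch of what that module might contain, assuming it simply mirrors the per-provider CachedSession instances it replaces; the cache file path, the expiry constant name and the carried-over 30-day value are assumptions rather than details taken from this commit:

# core/cache_utils.py -- hypothetical sketch, the real file is not visible in this diff
from datetime import timedelta

from requests_cache import CachedSession

# Assumed to keep the 30-day expiry that each per-provider cache used before this commit
SEMI_STATIC_URL_DATA_CACHE_TIME_DAYS = 30

# Single CachedSession shared by every provider for semi-static lookups
# (GMA programme refs, POTA park data, SOTA summit data, SIOTA/WOTA/ZLOTA reference lists);
# the on-disk cache path is an assumption
SEMI_STATIC_URL_DATA_CACHE = CachedSession("cache/semi_static_url_data_cache",
                                           expire_after=timedelta(days=SEMI_STATIC_URL_DATA_CACHE_TIME_DAYS))

Because every provider now shares one session, there is a single requests-cache backend on disk instead of one per provider, and the expiry policy is defined in one place.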

View File

@@ -1,9 +1,9 @@
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime
 import pytz
-from requests_cache import CachedSession
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -17,8 +17,6 @@ class GMA(HTTPSpotProvider):
     SPOTS_URL = "https://www.cqgma.org/api/spots/25/"
     # GMA spots don't contain the details of the programme they are for, we need a separate lookup for that
     REF_INFO_URL_ROOT = "https://www.cqgma.org/api/ref/?"
-    REF_INFO_CACHE_TIME_DAYS = 30
-    REF_INFO_CACHE = CachedSession("cache/gma_ref_info_cache", expire_after=timedelta(days=REF_INFO_CACHE_TIME_DAYS))

     def __init__(self, provider_config):
         super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
@@ -44,7 +42,7 @@ class GMA(HTTPSpotProvider):
                         dx_longitude=float(source_spot["LON"]) if (source_spot["LON"] and source_spot["LON"] != "") else None)
             # GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
-            ref_response = self.REF_INFO_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
+            ref_response = SEMI_STATIC_URL_DATA_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
                                                    headers=HTTP_HEADERS)
             # Sometimes this is blank, so handle that
             if ref_response.text is not None and ref_response.text != "":

View File

@@ -1,11 +1,11 @@
 import csv
 import logging
 import re
-from datetime import datetime, timedelta
+from datetime import datetime
 import pytz
-from requests_cache import CachedSession
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -18,8 +18,6 @@ class ParksNPeaks(HTTPSpotProvider):
     POLL_INTERVAL_SEC = 120
     SPOTS_URL = "https://www.parksnpeaks.org/api/ALL"
     SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv"
-    SIOTA_LIST_CACHE_TIME_DAYS = 30
-    SIOTA_LIST_CACHE = CachedSession("cache/siota_data_cache", expire_after=timedelta(days=SIOTA_LIST_CACHE_TIME_DAYS))

     def __init__(self, provider_config):
         super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
@@ -59,7 +57,7 @@ class ParksNPeaks(HTTPSpotProvider):
             # SiOTA lat/lon/grid lookup
             if spot.sig == "SIOTA":
-                siota_csv_data = self.SIOTA_LIST_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
+                siota_csv_data = SEMI_STATIC_URL_DATA_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
                 siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
                 for row in siota_dr:
                     if row["SILO_CODE"] == spot.sig_refs[0]:

View File

@@ -1,9 +1,9 @@
 import re
-from datetime import datetime, timedelta
+from datetime import datetime
 import pytz
-from requests_cache import CachedSession
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig, get_ref_regex_for_sig
 from data.sig_ref import SIGRef
@@ -17,9 +17,6 @@ class POTA(HTTPSpotProvider):
     SPOTS_URL = "https://api.pota.app/spot/activator"
     # Might need to look up extra park data
     PARK_URL_ROOT = "https://api.pota.app/park/"
-    PARK_DATA_CACHE_TIME_DAYS = 30
-    PARK_DATA_CACHE = CachedSession("cache/pota_park_data_cache",
-                                    expire_after=timedelta(days=PARK_DATA_CACHE_TIME_DAYS))

     def __init__(self, provider_config):
         super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
@@ -52,7 +49,7 @@ class POTA(HTTPSpotProvider):
                 ref = SIGRef(id=r.upper(), url="https://pota.app/#/park/" + r.upper())
                 # Now we need to look up the name of that reference from the API, because the comment won't have it
-                park_response = self.PARK_DATA_CACHE.get(self.PARK_URL_ROOT + r.upper(), headers=HTTP_HEADERS)
+                park_response = SEMI_STATIC_URL_DATA_CACHE.get(self.PARK_URL_ROOT + r.upper(), headers=HTTP_HEADERS)
                 park_data = park_response.json()
                 if park_data and "name" in park_data:
                     ref.name = park_data["name"]

View File

@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
 import requests
 from requests_cache import CachedSession
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -21,8 +22,6 @@ class SOTA(HTTPSpotProvider):
     SPOTS_URL = "https://api-db2.sota.org.uk/api/spots/60/all/all"
     # SOTA spots don't contain lat/lon, we need a separate lookup for that
     SUMMIT_URL_ROOT = "https://api-db2.sota.org.uk/api/summits/"
-    SUMMIT_DATA_CACHE_TIME_DAYS = 30
-    SUMMIT_DATA_CACHE = CachedSession("cache/sota_summit_data_cache", expire_after=timedelta(days=SUMMIT_DATA_CACHE_TIME_DAYS))

     def __init__(self, provider_config):
         super().__init__(provider_config, self.EPOCH_URL, self.POLL_INTERVAL_SEC)
@@ -57,7 +56,7 @@ class SOTA(HTTPSpotProvider):
             # SOTA doesn't give summit lat/lon/grid in the main call, so we need another separate call for this
             try:
-                summit_response = self.SUMMIT_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=HTTP_HEADERS)
+                summit_response = SEMI_STATIC_URL_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=HTTP_HEADERS)
                 summit_data = summit_response.json()
                 spot.dx_grid = summit_data["locator"]
                 spot.dx_latitude = summit_data["latitude"]

View File

@@ -1,9 +1,9 @@
-from datetime import timedelta, datetime
+from datetime import datetime
 import pytz
-from requests_cache import CachedSession
 from rss_parser import RSSParser
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -16,8 +16,6 @@ class WOTA(HTTPSpotProvider):
     POLL_INTERVAL_SEC = 120
     SPOTS_URL = "https://www.wota.org.uk/spots_rss.php"
     LIST_URL = "https://www.wota.org.uk/mapping/data/summits.json"
-    LIST_CACHE_TIME_DAYS = 30
-    LIST_CACHE = CachedSession("cache/wota_data_cache", expire_after=timedelta(days=LIST_CACHE_TIME_DAYS))
     RSS_DATE_TIME_FORMAT = "%a, %d %b %Y %H:%M:%S %z"

     def __init__(self, provider_config):
@@ -75,7 +73,7 @@ class WOTA(HTTPSpotProvider):
             # WOTA name/grid/lat/lon lookup
             if ref:
-                wota_data = self.LIST_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
+                wota_data = SEMI_STATIC_URL_DATA_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
                 for feature in wota_data["features"]:
                     if feature["properties"]["wotaId"] == ref:
                         spot.sig_refs[0].name = feature["properties"]["title"]

View File

@@ -1,11 +1,8 @@
-import csv
-import logging
-import re
-from datetime import datetime, timedelta
+from datetime import datetime
 import pytz
-from requests_cache import CachedSession
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import HTTP_HEADERS
 from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -18,8 +15,6 @@ class ZLOTA(HTTPSpotProvider):
     POLL_INTERVAL_SEC = 120
     SPOTS_URL = "https://ontheair.nz/api/spots?zlota_only=true"
     LIST_URL = "https://ontheair.nz/assets/assets.json"
-    LIST_CACHE_TIME_DAYS = 30
-    LIST_CACHE = CachedSession("cache/zlota_data_cache", expire_after=timedelta(days=LIST_CACHE_TIME_DAYS))

     def __init__(self, provider_config):
         super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
@@ -47,7 +42,7 @@ class ZLOTA(HTTPSpotProvider):
                      time=datetime.fromisoformat(source_spot["referenced_time"]).astimezone(pytz.UTC).timestamp())
             # ZLOTA lat/lon lookup
-            zlota_data = self.LIST_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
+            zlota_data = SEMI_STATIC_URL_DATA_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
             for asset in zlota_data:
                 if asset["code"] == spot.sig_refs[0]:
                     spot.dx_latitude = asset["y"]
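
For a quick check that the shared cache is doing its job, requests-cache marks responses served from the cache with a from_cache attribute. A small usage sketch against one of the URLs above; run order and output are illustrative:

from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
from core.constants import HTTP_HEADERS

SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv"

# The first call typically hits the network and stores the response; the second,
# made within the expiry window, is answered from the shared cache.
first = SEMI_STATIC_URL_DATA_CACHE.get(SIOTA_LIST_URL, headers=HTTP_HEADERS)
second = SEMI_STATIC_URL_DATA_CACHE.get(SIOTA_LIST_URL, headers=HTTP_HEADERS)
print(first.from_cache, second.from_cache)  # typically: False True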