Refactor SIG reference detail lookup into a common location, taking it out of the individual spot providers. This means references can now be looked up properly from DX Cluster spot comments, etc. Closes #74, as there is no longer any duplication of these lookups. Works towards #54, as sig_refs now specify their SIG internally.

Ian Renton
2025-11-02 15:45:19 +00:00
parent 28010a68ae
commit 286ff66721
22 changed files with 192 additions and 233 deletions
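
For context, the call sites below imply the following shape for SIGRef: each reference now carries its own sig, and the name/URL/icon details that every provider used to fetch for itself are resolved in one common place instead. A minimal sketch of that shape, assuming the field defaults and the helper below (the actual data/sig_ref.py and core/sig_utils.py changes are not shown in this excerpt):

from dataclasses import dataclass
from typing import Optional

@dataclass
class SIGRef:
    # Shape inferred from the constructor calls in this diff; the defaults are assumptions.
    id: str                      # e.g. a park/summit/bunker reference code
    sig: str                     # the SIG it belongs to, e.g. "POTA"; now stored on the ref itself
    name: Optional[str] = None   # human-readable name, where the source supplies one
    url: Optional[str] = None    # now filled in centrally rather than by each provider

# Hypothetical central URL lookup, standing in for the per-provider url=...
# arguments removed below. The templates are taken from the removed lines.
URL_TEMPLATES = {
    "POTA": "https://pota.app/#/park/{id}",
    "SOTA": "https://www.sotadata.org.uk/en/summit/{id}",
    "WWFF": "https://wwff.co/directory/?showRef={id}",
}

def get_url_for_ref(ref: SIGRef) -> Optional[str]:
    # Return a programme-specific info URL for the reference, if we know one.
    template = URL_TEMPLATES.get(ref.sig)
    return template.format(id=ref.id) if template else None

Usage would then look like get_url_for_ref(SIGRef(id="GB-0001", sig="POTA")), with the reference id here made up for the example.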

View File

@@ -8,7 +8,7 @@ import pytz
 import telnetlib3
 from core.config import SERVER_OWNER_CALLSIGN
-from core.sig_utils import ANY_SIG_REGEX, get_icon_for_sig, get_ref_regex_for_sig
+from core.sig_utils import ANY_SIG_REGEX, get_ref_regex_for_sig
 from data.sig_ref import SIGRef
 from data.spot import Spot
 from spotproviders.spot_provider import SpotProvider
@@ -85,12 +85,11 @@ class DXCluster(SpotProvider):
         sig_match = re.search(r"(^|\W)" + ANY_SIG_REGEX + r"($|\W)", spot.comment, re.IGNORECASE)
         if sig_match:
             spot.sig = sig_match.group(2).upper()
-            spot.icon = get_icon_for_sig(spot.sig)
             ref_regex = get_ref_regex_for_sig(spot.sig)
             if ref_regex:
                 sig_ref_match = re.search(r"(^|\W)" + spot.sig + r"($|\W)(" + ref_regex + r")($|\W)", spot.comment, re.IGNORECASE)
                 if sig_ref_match:
-                    spot.sig_refs = [SIGRef(id=sig_ref_match.group(3).upper())]
+                    spot.sig_refs = [SIGRef(id=sig_ref_match.group(3).upper(), sig=spot.sig)]
         # Add to our list
         self.submit(spot)
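
To illustrate the two-stage comment matching above: the first search finds which SIG is mentioned, and the second anchors that SIG's ref regex to it. A runnable sketch with stand-in values (the real ANY_SIG_REGEX and per-SIG ref regexes live in core/sig_utils.py and are not shown here; the comment and reference are made up):

import re

ANY_SIG_REGEX = r"(POTA|SOTA|WWFF)"     # stand-in; the real alternation covers many more SIGs
POTA_REF_REGEX = r"[A-Z]{1,2}-\d{4,5}"  # stand-in for get_ref_regex_for_sig("POTA")

comment = "Nice signal! POTA GB-0001 QRT soon"
sig_match = re.search(r"(^|\W)" + ANY_SIG_REGEX + r"($|\W)", comment, re.IGNORECASE)
if sig_match:
    sig = sig_match.group(2).upper()    # group 2 is the SIG alternation, here "POTA"
    ref_match = re.search(r"(^|\W)" + sig + r"($|\W)(" + POTA_REF_REGEX + r")($|\W)",
                          comment, re.IGNORECASE)
    if ref_match:
        print(sig, ref_match.group(3).upper())  # prints: POTA GB-0001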

View File

@@ -34,7 +34,7 @@ class GMA(HTTPSpotProvider):
                         mode=source_spot["MODE"].upper() if "<>" not in source_spot["MODE"] else None,
                         # Filter out some weird mode strings
                         comment=source_spot["TEXT"],
-                        sig_refs=[SIGRef(id=source_spot["REF"], name=source_spot["NAME"], url="https://www.cqgma.org/zinfo.php?ref=" + source_spot["REF"])],
+                        sig_refs=[SIGRef(id=source_spot["REF"], sig="", name=source_spot["NAME"])],
                         time=datetime.strptime(source_spot["DATE"] + source_spot["TIME"], "%Y%m%d%H%M").replace(
                             tzinfo=pytz.UTC).timestamp(),
                         dx_latitude=float(source_spot["LAT"]) if (source_spot["LAT"] and source_spot["LAT"] != "") else None,
@@ -54,22 +54,21 @@
             if ref_info["reftype"] not in ["POTA", "WWFF"] and (ref_info["reftype"] != "Summit" or ref_info["sota"] == ""):
                 match ref_info["reftype"]:
                     case "Summit":
-                        spot.sig = "GMA"
+                        spot.sig_refs[0].sig = "GMA"
                     case "IOTA Island":
-                        spot.sig = "IOTA"
+                        spot.sig_refs[0].sig = "IOTA"
                     case "Lighthouse (ILLW)":
-                        spot.sig = "ILLW"
+                        spot.sig_refs[0].sig = "ILLW"
                     case "Lighthouse (ARLHS)":
-                        spot.sig = "ARLHS"
+                        spot.sig_refs[0].sig = "ARLHS"
                     case "Castle":
-                        spot.sig = "WCA"
+                        spot.sig_refs[0].sig = "WCA"
                     case "Mill":
-                        spot.sig = "MOTA"
+                        spot.sig_refs[0].sig = "MOTA"
                     case _:
                         logging.warn("GMA spot found with ref type " + ref_info[
                             "reftype"] + ", developer needs to add support for this!")
-                        spot.sig = ref_info["reftype"]
-                        spot.icon = get_icon_for_sig(spot.sig)
+                        spot.sig_refs[0].sig = ref_info["reftype"]
         # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
         # that for us.

View File

@@ -53,9 +53,7 @@ class HEMA(HTTPSpotProvider):
                         freq=float(freq_mode_match.group(1)) * 1000000,
                         mode=freq_mode_match.group(2).upper(),
                         comment=spotter_comment_match.group(2),
-                        sig="HEMA",
-                        sig_refs=[SIGRef(id=spot_items[3].upper(), name=spot_items[4])],
-                        icon=get_icon_for_sig("HEMA"),
+                        sig_refs=[SIGRef(id=spot_items[3].upper(), sig="HEMA", name=spot_items[4])],
                         time=datetime.strptime(spot_items[0], "%d/%m/%Y %H:%M").replace(tzinfo=pytz.UTC).timestamp(),
                         dx_latitude=float(spot_items[7]),
                         dx_longitude=float(spot_items[8]))

View File

@@ -1,12 +1,9 @@
-import csv
 import logging
 import re
 from datetime import datetime
 import pytz
-from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
-from core.constants import HTTP_HEADERS
-from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
 from data.spot import Spot
@@ -36,9 +33,7 @@ class ParksNPeaks(HTTPSpotProvider):
                         # Seen PNP spots with empty frequency, and with comma-separated thousands digits
                         mode=source_spot["actMode"].upper(),
                         comment=source_spot["actComments"],
-                        sig=source_spot["actClass"].upper(),
-                        sig_refs=[SIGRef(id=source_spot["actSiteID"])],
-                        icon=get_icon_for_sig(source_spot["actClass"]),
+                        sig_refs=[SIGRef(id=source_spot["actSiteID"], sig=source_spot["actClass"].upper())],
                         time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(
                             tzinfo=pytz.UTC).timestamp())
@@ -52,24 +47,11 @@
                 spot.de_call = m.group(1)
             # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
-            if spot.sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]:
+            if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]:
                 logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to add support for this!")
-            # SiOTA lat/lon/grid lookup
-            if spot.sig == "SIOTA":
-                siota_csv_data = SEMI_STATIC_URL_DATA_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
-                siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
-                for row in siota_dr:
-                    if row["SILO_CODE"] == spot.sig_refs[0]:
-                        spot.dx_latitude = float(row["LAT"])
-                        spot.dx_longitude = float(row["LNG"])
-                        spot.dx_grid = row["LOCATOR"]
-                        break
-            # Note there is currently no support for KRMNPA location lookup, see issue #61.
             # If this is POTA, SOTA, WWFF or ZLOTA data we already have it through other means, so ignore. Otherwise,
             # add to the spot list.
-            if spot.sig not in ["POTA", "SOTA", "WWFF", "ZLOTA"]:
+            if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "ZLOTA"]:
                 new_spots.append(spot)
         return new_spots

View File

@@ -3,8 +3,6 @@ from datetime import datetime
 import pytz
-from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
-from core.constants import HTTP_HEADERS
-from core.sig_utils import get_icon_for_sig, get_ref_regex_for_sig
+from core.sig_utils import get_ref_regex_for_sig
 from data.sig_ref import SIGRef
 from data.spot import Spot
@@ -33,9 +31,7 @@ class POTA(HTTPSpotProvider):
                         freq=float(source_spot["frequency"]) * 1000,
                         mode=source_spot["mode"].upper(),
                         comment=source_spot["comments"],
-                        sig="POTA",
-                        sig_refs=[SIGRef(id=source_spot["reference"], name=source_spot["name"], url="https://pota.app/#/park/" + source_spot["reference"])],
-                        icon=get_icon_for_sig("POTA"),
+                        sig_refs=[SIGRef(id=source_spot["reference"], sig="POTA", name=source_spot["name"])],
                         time=datetime.strptime(source_spot["spotTime"], "%Y-%m-%dT%H:%M:%S").replace(
                             tzinfo=pytz.UTC).timestamp(),
                         dx_grid=source_spot["grid6"],
@@ -46,16 +42,7 @@
             all_comment_refs = re.findall(get_ref_regex_for_sig("POTA"), spot.comment)
             for r in all_comment_refs:
                 if r not in list(map(lambda ref: ref.id, spot.sig_refs)):
-                    ref = SIGRef(id=r.upper(), url="https://pota.app/#/park/" + r.upper())
-                    # Now we need to look up the name of that reference from the API, because the comment won't have it
-                    park_response = SEMI_STATIC_URL_DATA_CACHE.get(self.PARK_URL_ROOT + r.upper(), headers=HTTP_HEADERS)
-                    park_data = park_response.json()
-                    if park_data and "name" in park_data:
-                        ref.name = park_data["name"]
-                    # Finally append our new reference to the spot's reference list
-                    spot.sig_refs.append(ref)
+                    spot.sig_refs.append(SIGRef(id=r.upper(), sig="POTA"))
             # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
             # that for us.

View File

@@ -1,10 +1,7 @@
 import logging
-from datetime import datetime, timedelta
+from datetime import datetime
 import requests
 from requests_cache import CachedSession
-from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
-from core.constants import HTTP_HEADERS
-from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
@@ -48,22 +45,10 @@ class SOTA(HTTPSpotProvider):
                         freq=(float(source_spot["frequency"]) * 1000000) if (source_spot["frequency"] is not None) else None,  # Seen SOTA spots with no frequency!
                         mode=source_spot["mode"].upper(),
                         comment=source_spot["comments"],
-                        sig="SOTA",
-                        sig_refs=[SIGRef(id=source_spot["summitCode"], name=source_spot["summitName"], url="https://www.sotadata.org.uk/en/summit/" + source_spot["summitCode"])],
-                        icon=get_icon_for_sig("SOTA"),
+                        sig_refs=[SIGRef(id=source_spot["summitCode"], sig="SOTA", name=source_spot["summitName"])],
                         time=datetime.fromisoformat(source_spot["timeStamp"]).timestamp(),
                         activation_score=source_spot["points"])
-            # SOTA doesn't give summit lat/lon/grid in the main call, so we need another separate call for this
-            try:
-                summit_response = SEMI_STATIC_URL_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=HTTP_HEADERS)
-                summit_data = summit_response.json()
-                spot.dx_grid = summit_data["locator"]
-                spot.dx_latitude = summit_data["latitude"]
-                spot.dx_longitude = summit_data["longitude"]
-            except Exception:
-                logging.warn("Looking up summit " + source_spot["summitCode"] + " from the SOTA API failed. No summit data was available.")
             # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
             # that for us.
             new_spots.append(spot)

View File

@@ -3,8 +3,6 @@ from datetime import datetime
 import pytz
 from rss_parser import RSSParser
-from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
-from core.constants import HTTP_HEADERS
-from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
 from data.spot import Spot
@@ -66,21 +64,8 @@
                         freq=freq_hz,
                         mode=mode,
                         comment=comment,
-                        sig="WOTA",
-                        sig_refs=[SIGRef(id=ref, name=ref_name, url="https://www.wota.org.uk/MM_" + ref)] if ref else [],
-                        icon=get_icon_for_sig("WOTA"),
+                        sig_refs=[SIGRef(id=ref, sig="WOTA", name=ref_name)] if ref else [],
                         time=time.timestamp())
-            # WOTA name/grid/lat/lon lookup
-            if ref:
-                wota_data = SEMI_STATIC_URL_DATA_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
-                for feature in wota_data["features"]:
-                    if feature["properties"]["wotaId"] == ref:
-                        spot.sig_refs[0].name = feature["properties"]["title"]
-                        spot.dx_latitude = feature["geometry"]["coordinates"][1]
-                        spot.dx_longitude = feature["geometry"]["coordinates"][0]
-                        spot.dx_grid = feature["properties"]["qthLocator"]
-                        break
             new_spots.append(spot)
         return new_spots

View File

@@ -20,10 +20,7 @@ class WWBOTA(SSESpotProvider):
         # n-fer activations.
         refs = []
         for ref in source_spot["references"]:
-            sigref = SIGRef(id=ref["reference"], name=ref["name"])
-            # Bunkerbase URLs only work for UK bunkers, so only add a URL if we have a B/G prefix.
-            if ref["reference"].startswith("B/G"):
-                sigref.url="https://bunkerwiki.org/?s=" + ref["reference"]
+            sigref = SIGRef(id=ref["reference"], sig="WWBOTA", name=ref["name"])
             refs.append(sigref)
         spot = Spot(source=self.name,
@@ -32,9 +29,7 @@ class WWBOTA(SSESpotProvider):
                     freq=float(source_spot["freq"]) * 1000000,
                     mode=source_spot["mode"].upper(),
                     comment=source_spot["comment"],
-                    sig="WWBOTA",
                     sig_refs=refs,
-                    icon=get_icon_for_sig("WWBOTA"),
                     time=datetime.fromisoformat(source_spot["time"]).timestamp(),
                     # WWBOTA spots can contain multiple references for bunkers being activated simultaneously. For
                     # now, we will just pick the first one to use as our grid, latitude and longitude.

View File

@@ -28,9 +28,7 @@ class WWFF(HTTPSpotProvider):
                         freq=float(source_spot["frequency_khz"]) * 1000,
                         mode=source_spot["mode"].upper(),
                         comment=source_spot["remarks"],
-                        sig="WWFF",
-                        sig_refs=[SIGRef(id=source_spot["reference"], name=source_spot["reference_name"], url="https://wwff.co/directory/?showRef=" + source_spot["reference"])],
-                        icon=get_icon_for_sig("WWFF"),
+                        sig_refs=[SIGRef(id=source_spot["reference"], sig="WWFF", name=source_spot["reference_name"])],
                         time=datetime.fromtimestamp(source_spot["spot_time"], tz=pytz.UTC).timestamp(),
                         dx_latitude=source_spot["latitude"],
                         dx_longitude=source_spot["longitude"])

View File

@@ -2,8 +2,6 @@ from datetime import datetime
 import pytz
-from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
-from core.constants import HTTP_HEADERS
-from core.sig_utils import get_icon_for_sig
 from data.sig_ref import SIGRef
 from data.spot import Spot
@@ -36,18 +34,8 @@ class ZLOTA(HTTPSpotProvider):
                         freq=freq_hz,
                         mode=source_spot["mode"].upper().strip(),
                         comment=source_spot["comments"],
-                        sig="ZLOTA",
-                        sig_refs=[SIGRef(id=source_spot["reference"], name=source_spot["name"])],
-                        icon=get_icon_for_sig("ZLOTA"),
+                        sig_refs=[SIGRef(id=source_spot["reference"], sig="ZLOTA", name=source_spot["name"])],
                         time=datetime.fromisoformat(source_spot["referenced_time"]).astimezone(pytz.UTC).timestamp())
-            # ZLOTA lat/lon lookup
-            zlota_data = SEMI_STATIC_URL_DATA_CACHE.get(self.LIST_URL, headers=HTTP_HEADERS).json()
-            for asset in zlota_data:
-                if asset["code"] == spot.sig_refs[0]:
-                    spot.dx_latitude = asset["y"]
-                    spot.dx_longitude = asset["x"]
-                    break
             new_spots.append(spot)
         return new_spots