Improve adherence to python coding standards and clear up IDE static analysis warnings

This commit is contained in:
Ian Renton
2026-02-27 19:17:04 +00:00
parent 6b18ec6f88
commit 6982354364
53 changed files with 633 additions and 626 deletions

View File

@@ -15,39 +15,39 @@ class AlertProvider:
self.enabled = provider_config["enabled"] self.enabled = provider_config["enabled"]
self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC) self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
self.status = "Not Started" if self.enabled else "Disabled" self.status = "Not Started" if self.enabled else "Disabled"
self.alerts = None self._alerts = None
self.web_server = None self._web_server = None
def setup(self, alerts, web_server): def setup(self, alerts, web_server):
"""Set up the provider, e.g. giving it the alert list to work from""" """Set up the provider, e.g. giving it the alert list to work from"""
self.alerts = alerts self._alerts = alerts
self.web_server = web_server self._web_server = web_server
def start(self): def start(self):
"""Start the provider. This should return immediately after spawning threads to access the remote resources""" """Start the provider. This should return immediately after spawning threads to access the remote resources"""
raise NotImplementedError("Subclasses must implement this method") raise NotImplementedError("Subclasses must implement this method")
def submit_batch(self, alerts): def _submit_batch(self, alerts):
"""Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots, """Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots,
because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching
to deal with duplicates.""" to deal with duplicates."""
# Sort the batch so that earliest ones go in first. This helps keep the ordering correct when alerts are fired # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when alerts are fired
# off to SSE listeners. # off to SSE listeners.
alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0)) alerts = sorted(alerts, key=lambda a: (a.start_time if a and a.start_time else 0))
for alert in alerts: for alert in alerts:
# Fill in any blanks and add to the list # Fill in any blanks and add to the list
alert.infer_missing() alert.infer_missing()
self.add_alert(alert) self._add_alert(alert)
def add_alert(self, alert): def _add_alert(self, alert):
if not alert.expired(): if not alert.expired():
self.alerts.add(alert.id, alert, expire=MAX_ALERT_AGE) self._alerts.add(alert.id, alert, expire=MAX_ALERT_AGE)
# Ping the web server in case we have any SSE connections that need to see this immediately # Ping the web server in case we have any SSE connections that need to see this immediately
if self.web_server: if self._web_server:
self.web_server.notify_new_alert(alert) self._web_server.notify_new_alert(alert)
def stop(self): def stop(self):
"""Stop any threads and prepare for application shutdown""" """Stop any threads and prepare for application shutdown"""

View File

@@ -17,7 +17,7 @@ class BOTA(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
# Find the table of upcoming alerts # Find the table of upcoming alerts
bs = BeautifulSoup(http_response.content.decode(), features="lxml") bs = BeautifulSoup(http_response.content.decode(), features="lxml")

View File

@@ -15,14 +15,15 @@ class HTTPAlertProvider(AlertProvider):
def __init__(self, provider_config, url, poll_interval): def __init__(self, provider_config, url, poll_interval):
super().__init__(provider_config) super().__init__(provider_config)
self.url = url self._url = url
self.poll_interval = poll_interval self._poll_interval = poll_interval
self._thread = None
self._stop_event = Event() self._stop_event = Event()
def start(self): def start(self):
# Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between # Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between
# subsequent polls, so start() returns immediately and the application can continue starting. # subsequent polls, so start() returns immediately and the application can continue starting.
logging.info("Set up query of " + self.name + " alert API every " + str(self.poll_interval) + " seconds.") logging.info("Set up query of " + self.name + " alert API every " + str(self._poll_interval) + " seconds.")
self._thread = Thread(target=self._run, daemon=True) self._thread = Thread(target=self._run, daemon=True)
self._thread.start() self._thread.start()
@@ -32,31 +33,31 @@ class HTTPAlertProvider(AlertProvider):
def _run(self): def _run(self):
while True: while True:
self._poll() self._poll()
if self._stop_event.wait(timeout=self.poll_interval): if self._stop_event.wait(timeout=self._poll_interval):
break break
def _poll(self): def _poll(self):
try: try:
# Request data from API # Request data from API
logging.debug("Polling " + self.name + " alert API...") logging.debug("Polling " + self.name + " alert API...")
http_response = requests.get(self.url, headers=HTTP_HEADERS) http_response = requests.get(self._url, headers=HTTP_HEADERS)
# Pass off to the subclass for processing # Pass off to the subclass for processing
new_alerts = self.http_response_to_alerts(http_response) new_alerts = self._http_response_to_alerts(http_response)
# Submit the new alerts for processing. There might not be any alerts for the less popular programs. # Submit the new alerts for processing. There might not be any alerts for the less popular programs.
if new_alerts: if new_alerts:
self.submit_batch(new_alerts) self._submit_batch(new_alerts)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Received data from " + self.name + " alert API.") logging.debug("Received data from " + self.name + " alert API.")
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception in HTTP JSON Alert Provider (" + self.name + ")") logging.exception("Exception in HTTP JSON Alert Provider (" + self.name + ")")
# Brief pause on error before the next poll, but still respond promptly to stop() # Brief pause on error before the next poll, but still respond promptly to stop()
self._stop_event.wait(timeout=1) self._stop_event.wait(timeout=1)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
"""Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass """Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass
implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
the API actually provides.""" the API actually provides."""

View File

@@ -18,7 +18,7 @@ class NG3K(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
rss = RSSParser.parse(http_response.content.decode()) rss = RSSParser.parse(http_response.content.decode())
# Iterate through source data # Iterate through source data

View File

@@ -17,7 +17,7 @@ class ParksNPeaks(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
# Iterate through source data # Iterate through source data
for source_alert in http_response.json(): for source_alert in http_response.json():
@@ -45,7 +45,7 @@ class ParksNPeaks(HTTPAlertProvider):
# Log a warning for the developer if PnP gives us an unknown programme we've never seen before # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
if sig and sig not in ["POTA", "SOTA", "WWFF", "SiOTA", "ZLOTA", "KRMNPA"]: if sig and sig not in ["POTA", "SOTA", "WWFF", "SiOTA", "ZLOTA", "KRMNPA"]:
logging.warn("PNP alert found with sig " + sig + ", developer needs to add support for this!") logging.warning("PNP alert found with sig " + sig + ", developer needs to add support for this!")
# If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. Otherwise, add to # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. Otherwise, add to
# the alert list. Note that while ZLOTA has its own spots API, it doesn't have its own alerts API. So that # the alert list. Note that while ZLOTA has its own spots API, it doesn't have its own alerts API. So that

View File

@@ -16,7 +16,7 @@ class POTA(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
# Iterate through source data # Iterate through source data
for source_alert in http_response.json(): for source_alert in http_response.json():

View File

@@ -16,7 +16,7 @@ class SOTA(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
# Iterate through source data # Iterate through source data
for source_alert in http_response.json(): for source_alert in http_response.json():

View File

@@ -18,7 +18,7 @@ class WOTA(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
rss = RSSParser.parse(http_response.content.decode()) rss = RSSParser.parse(http_response.content.decode())
# Iterate through source data # Iterate through source data

View File

@@ -16,7 +16,7 @@ class WWFF(HTTPAlertProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_alerts(self, http_response): def _http_response_to_alerts(self, http_response):
new_alerts = [] new_alerts = []
# Iterate through source data # Iterate through source data
for source_alert in http_response.json(): for source_alert in http_response.json():

View File

@@ -1,7 +1,6 @@
import logging import logging
from datetime import datetime from datetime import datetime
from threading import Timer, Event, Thread from threading import Event, Thread
from time import sleep
import pytz import pytz
@@ -12,13 +11,13 @@ class CleanupTimer:
def __init__(self, spots, alerts, web_server, cleanup_interval): def __init__(self, spots, alerts, web_server, cleanup_interval):
"""Constructor""" """Constructor"""
self.spots = spots self._spots = spots
self.alerts = alerts self._alerts = alerts
self.web_server = web_server self._web_server = web_server
self.cleanup_interval = cleanup_interval self._cleanup_interval = cleanup_interval
self.cleanup_timer = None
self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC) self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC)
self.status = "Starting" self.status = "Starting"
self._thread = None
self._stop_event = Event() self._stop_event = Event()
def start(self): def start(self):
@@ -33,7 +32,7 @@ class CleanupTimer:
self._stop_event.set() self._stop_event.set()
def _run(self): def _run(self):
while not self._stop_event.wait(timeout=self.cleanup_interval): while not self._stop_event.wait(timeout=self._cleanup_interval):
self._cleanup() self._cleanup()
def _cleanup(self): def _cleanup(self):
@@ -41,34 +40,34 @@ class CleanupTimer:
try: try:
# Perform cleanup via letting the data expire # Perform cleanup via letting the data expire
self.spots.expire() self._spots.expire()
self.alerts.expire() self._alerts.expire()
# Explicitly clean up any spots and alerts that have expired # Explicitly clean up any spots and alerts that have expired
for id in list(self.spots.iterkeys()): for i in list(self._spots.iterkeys()):
try: try:
spot = self.spots[id] spot = self._spots[i]
if spot.expired(): if spot.expired():
self.spots.delete(id) self._spots.delete(i)
except KeyError: except KeyError:
# Must have already been deleted, OK with that # Must have already been deleted, OK with that
pass pass
for id in list(self.alerts.iterkeys()): for i in list(self._alerts.iterkeys()):
try: try:
alert = self.alerts[id] alert = self._alerts[i]
if alert.expired(): if alert.expired():
self.alerts.delete(id) self._alerts.delete(i)
except KeyError: except KeyError:
# Must have already been deleted, OK with that # Must have already been deleted, OK with that
pass pass
# Clean up web server SSE spot/alert queues # Clean up web server SSE spot/alert queues
self.web_server.clean_up_sse_queues() self._web_server.clean_up_sse_queues()
self.status = "OK" self.status = "OK"
self.last_cleanup_time = datetime.now(pytz.UTC) self.last_cleanup_time = datetime.now(pytz.UTC)
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception in Cleanup thread") logging.exception("Exception in Cleanup thread")
self._stop_event.wait(timeout=1) self._stop_event.wait(timeout=1)

View File

@@ -106,7 +106,7 @@ def lat_lon_for_grid_sw_corner_plus_size(grid):
# Return None if our Maidenhead string is invalid or too short # Return None if our Maidenhead string is invalid or too short
length = len(grid) length = len(grid)
if length <= 0 or (length % 2) != 0: if length <= 0 or (length % 2) != 0:
return (None, None, None, None) return None, None, None, None
lat = 0.0 # aggregated latitude lat = 0.0 # aggregated latitude
lon = 0.0 # aggregated longitude lon = 0.0 # aggregated longitude
@@ -124,17 +124,17 @@ def lat_lon_for_grid_sw_corner_plus_size(grid):
# A-X (0-23) thereafter. # A-X (0-23) thereafter.
max_cell_no = 17 if block == 0 else 23 max_cell_no = 17 if block == 0 else 23
if lat_cell_no < 0 or lat_cell_no > max_cell_no or lon_cell_no < 0 or lon_cell_no > max_cell_no: if lat_cell_no < 0 or lat_cell_no > max_cell_no or lon_cell_no < 0 or lon_cell_no > max_cell_no:
return (None, None, None, None) return None, None, None, None
else: else:
# Numbers in this block # Numbers in this block
try: try:
lon_cell_no = int(grid[block * 2]) lon_cell_no = int(grid[block * 2])
lat_cell_no = int(grid[block * 2 + 1]) lat_cell_no = int(grid[block * 2 + 1])
except ValueError: except ValueError:
return (None, None, None, None) return None, None, None, None
# Bail if the values aren't in range 0-9 # Bail if the values aren't in range 0-9
if lat_cell_no < 0 or lat_cell_no > 9 or lon_cell_no < 0 or lon_cell_no > 9: if lat_cell_no < 0 or lat_cell_no > 9 or lon_cell_no < 0 or lon_cell_no > 9:
return (None, None, None, None) return None, None, None, None
# Aggregate the angles # Aggregate the angles
lat += lat_cell_no * lat_cell_size lat += lat_cell_no * lat_cell_size

View File

@@ -27,30 +27,30 @@ class LookupHelper:
lookup methods will fail if start() has not yet been called. This therefore needs starting before any spot or lookup methods will fail if start() has not yet been called. This therefore needs starting before any spot or
alert handlers are created.""" alert handlers are created."""
self.CLUBLOG_CALLSIGN_DATA_CACHE = None self._clublog_callsign_data_cache = None
self.LOOKUP_LIB_CLUBLOG_XML = None self._lookup_lib_clublog_xml = None
self.CLUBLOG_XML_AVAILABLE = None self._clublog_xml_available = None
self.LOOKUP_LIB_CLUBLOG_API = None self._lookup_lib_clublog_api = None
self.CLUBLOG_XML_DOWNLOAD_LOCATION = None self._clublog_xml_download_location = None
self.CLUBLOG_API_AVAILABLE = None self._clublog_api_available = None
self.CLUBLOG_CTY_XML_CACHE = None self._clublog_cty_xml_cache = None
self.CLUBLOG_API_KEY = None self._clublog_api_key = None
self.QRZ_CALLSIGN_DATA_CACHE = None self._qrz_callsign_data_cache = None
self.LOOKUP_LIB_QRZ = None self._lookup_lib_qrz = None
self.QRZ_AVAILABLE = None self._qrz_available = None
self.HAMQTH_AVAILABLE = None self._hamqth_available = None
self.HAMQTH_CALLSIGN_DATA_CACHE = None self._hamqth_callsign_data_cache = None
self.HAMQTH_BASE_URL = "https://www.hamqth.com/xml.php" self._hamqth_base_url = "https://www.hamqth.com/xml.php"
# HamQTH session keys expire after an hour. Rather than working out how much time has passed manually, we cheat # HamQTH session keys expire after an hour. Rather than working out how much time has passed manually, we cheat
# and cache the HTTP response for 55 minutes, so when the login URL is queried within 55 minutes of the previous # and cache the HTTP response for 55 minutes, so when the login URL is queried within 55 minutes of the previous
# time, you just get the cached response. # time, you just get the cached response.
self.HAMQTH_SESSION_LOOKUP_CACHE = CachedSession("cache/hamqth_session_cache", self._hamqth_session_lookup_cache = CachedSession("cache/hamqth_session_cache",
expire_after=timedelta(minutes=55)) expire_after=timedelta(minutes=55))
self.CALL_INFO_BASIC = None self._call_info_basic = None
self.LOOKUP_LIB_BASIC = None self._lookup_lib_basic = None
self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = None self._country_files_cty_plist_download_location = None
self.DXCC_JSON_DOWNLOAD_LOCATION = None self._dxcc_json_download_location = None
self.DXCC_DATA = None self._dxcc_data = None
def start(self): def start(self):
# Lookup helpers from pyhamtools. We use five (!) of these. The simplest is country-files.com, which downloads # Lookup helpers from pyhamtools. We use five (!) of these. The simplest is country-files.com, which downloads
@@ -58,55 +58,55 @@ class LookupHelper:
# If the user provides login details/API keys, we also set up helpers for QRZ.com, HamQTH, Clublog (live API # If the user provides login details/API keys, we also set up helpers for QRZ.com, HamQTH, Clublog (live API
# request), and Clublog (XML download). The lookup functions iterate through these in a sensible order, looking # request), and Clublog (XML download). The lookup functions iterate through these in a sensible order, looking
# for suitable data. # for suitable data.
self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = "cache/cty.plist" self._country_files_cty_plist_download_location = "cache/cty.plist"
success = self.download_country_files_cty_plist() success = self._download_country_files_cty_plist()
if success: if success:
self.LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile", self._lookup_lib_basic = LookupLib(lookuptype="countryfile",
filename=self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION) filename=self._country_files_cty_plist_download_location)
else: else:
self.LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile") self._lookup_lib_basic = LookupLib(lookuptype="countryfile")
self.CALL_INFO_BASIC = Callinfo(self.LOOKUP_LIB_BASIC) self._call_info_basic = Callinfo(self._lookup_lib_basic)
self.QRZ_AVAILABLE = config["qrz-username"] != "" and config["qrz-password"] != "" self._qrz_available = config["qrz-username"] != "" and config["qrz-password"] != ""
if self.QRZ_AVAILABLE: if self._qrz_available:
self.LOOKUP_LIB_QRZ = LookupLib(lookuptype="qrz", username=config["qrz-username"], self._lookup_lib_qrz = LookupLib(lookuptype="qrz", username=config["qrz-username"],
pwd=config["qrz-password"]) pwd=config["qrz-password"])
self.QRZ_CALLSIGN_DATA_CACHE = Cache('cache/qrz_callsign_lookup_cache') self._qrz_callsign_data_cache = Cache('cache/qrz_callsign_lookup_cache')
self.HAMQTH_AVAILABLE = config["hamqth-username"] != "" and config["hamqth-password"] != "" self._hamqth_available = config["hamqth-username"] != "" and config["hamqth-password"] != ""
self.HAMQTH_CALLSIGN_DATA_CACHE = Cache('cache/hamqth_callsign_lookup_cache') self._hamqth_callsign_data_cache = Cache('cache/hamqth_callsign_lookup_cache')
self.CLUBLOG_API_KEY = config["clublog-api-key"] self._clublog_api_key = config["clublog-api-key"]
self.CLUBLOG_CTY_XML_CACHE = CachedSession("cache/clublog_cty_xml_cache", expire_after=timedelta(days=10)) self._clublog_cty_xml_cache = CachedSession("cache/clublog_cty_xml_cache", expire_after=timedelta(days=10))
self.CLUBLOG_API_AVAILABLE = self.CLUBLOG_API_KEY != "" self._clublog_api_available = self._clublog_api_key != ""
self.CLUBLOG_XML_DOWNLOAD_LOCATION = "cache/cty.xml" self._clublog_xml_download_location = "cache/cty.xml"
if self.CLUBLOG_API_AVAILABLE: if self._clublog_api_available:
self.LOOKUP_LIB_CLUBLOG_API = LookupLib(lookuptype="clublogapi", apikey=self.CLUBLOG_API_KEY) self._lookup_lib_clublog_api = LookupLib(lookuptype="clublogapi", apikey=self._clublog_api_key)
success = self.download_clublog_ctyxml() success = self._download_clublog_ctyxml()
self.CLUBLOG_XML_AVAILABLE = success self._clublog_xml_available = success
if success: if success:
self.LOOKUP_LIB_CLUBLOG_XML = LookupLib(lookuptype="clublogxml", self._lookup_lib_clublog_xml = LookupLib(lookuptype="clublogxml",
filename=self.CLUBLOG_XML_DOWNLOAD_LOCATION) filename=self._clublog_xml_download_location)
self.CLUBLOG_CALLSIGN_DATA_CACHE = Cache('cache/clublog_callsign_lookup_cache') self._clublog_callsign_data_cache = Cache('cache/clublog_callsign_lookup_cache')
# We also get a lookup of DXCC data from K0SWE to use for additional lookups of e.g. flags. # We also get a lookup of DXCC data from K0SWE to use for additional lookups of e.g. flags.
self.DXCC_JSON_DOWNLOAD_LOCATION = "cache/dxcc.json" self._dxcc_json_download_location = "cache/dxcc.json"
success = self.download_dxcc_json() success = self._download_dxcc_json()
if success: if success:
with open(self.DXCC_JSON_DOWNLOAD_LOCATION) as f: with open(self._dxcc_json_download_location) as f:
tmp_dxcc_data = json.load(f)["dxcc"] tmp_dxcc_data = json.load(f)["dxcc"]
# Reformat as a map for faster lookup # Reformat as a map for faster lookup
self.DXCC_DATA = {} self._dxcc_data = {}
for dxcc in tmp_dxcc_data: for dxcc in tmp_dxcc_data:
self.DXCC_DATA[dxcc["entityCode"]] = dxcc self._dxcc_data[dxcc["entityCode"]] = dxcc
else: else:
logging.error("Could not download DXCC data, flags and similar data may be missing!") logging.error("Could not download DXCC data, flags and similar data may be missing!")
# Precompile regex matches for DXCCs to improve efficiency when iterating through them # Precompile regex matches for DXCCs to improve efficiency when iterating through them
for dxcc in self.DXCC_DATA.values(): for dxcc in self._dxcc_data.values():
dxcc["_prefixRegexCompiled"] = re.compile(dxcc["prefixRegex"]) dxcc["_prefixRegexCompiled"] = re.compile(dxcc["prefixRegex"])
def download_country_files_cty_plist(self): def _download_country_files_cty_plist(self):
"""Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use """Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use
this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can
catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the
@@ -117,7 +117,7 @@ class LookupHelper:
response = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist", response = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist",
headers=HTTP_HEADERS).text headers=HTTP_HEADERS).text
with open(self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION, "w") as f: with open(self._country_files_cty_plist_download_location, "w") as f:
f.write(response) f.write(response)
f.flush() f.flush()
return True return True
@@ -126,7 +126,7 @@ class LookupHelper:
logging.error("Exception when downloading Clublog cty.xml", e) logging.error("Exception when downloading Clublog cty.xml", e)
return False return False
def download_dxcc_json(self): def _download_dxcc_json(self):
"""Download the dxcc.json file on first startup.""" """Download the dxcc.json file on first startup."""
try: try:
@@ -135,7 +135,7 @@ class LookupHelper:
"https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json", "https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json",
headers=HTTP_HEADERS).text headers=HTTP_HEADERS).text
with open(self.DXCC_JSON_DOWNLOAD_LOCATION, "w") as f: with open(self._dxcc_json_download_location, "w") as f:
f.write(response) f.write(response)
f.flush() f.flush()
return True return True
@@ -144,20 +144,20 @@ class LookupHelper:
logging.error("Exception when downloading dxcc.json", e) logging.error("Exception when downloading dxcc.json", e)
return False return False
def download_clublog_ctyxml(self): def _download_clublog_ctyxml(self):
"""Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the """Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the
database live if possible.""" database live if possible."""
try: try:
logging.info("Downloading Clublog cty.xml.gz...") logging.info("Downloading Clublog cty.xml.gz...")
response = self.CLUBLOG_CTY_XML_CACHE.get("https://cdn.clublog.org/cty.php?api=" + self.CLUBLOG_API_KEY, response = self._clublog_cty_xml_cache.get("https://cdn.clublog.org/cty.php?api=" + self._clublog_api_key,
headers=HTTP_HEADERS) headers=HTTP_HEADERS)
logging.info("Caching Clublog cty.xml.gz...") logging.info("Caching Clublog cty.xml.gz...")
open(self.CLUBLOG_XML_DOWNLOAD_LOCATION + ".gz", 'wb').write(response.content) open(self._clublog_xml_download_location + ".gz", 'wb').write(response.content)
with gzip.open(self.CLUBLOG_XML_DOWNLOAD_LOCATION + ".gz", "rb") as uncompressed: with gzip.open(self._clublog_xml_download_location + ".gz", "rb") as uncompressed:
file_content = uncompressed.read() file_content = uncompressed.read()
logging.info("Caching Clublog cty.xml...") logging.info("Caching Clublog cty.xml...")
with open(self.CLUBLOG_XML_DOWNLOAD_LOCATION, "wb") as f: with open(self._clublog_xml_download_location, "wb") as f:
f.write(file_content) f.write(file_content)
f.flush() f.flush()
return True return True
@@ -166,69 +166,36 @@ class LookupHelper:
logging.error("Exception when downloading Clublog cty.xml", e) logging.error("Exception when downloading Clublog cty.xml", e)
return False return False
def infer_mode_from_comment(self, comment):
"""Infer a mode from the comment"""
for mode in ALL_MODES:
if mode in comment.upper():
return mode
for mode in MODE_ALIASES.keys():
if mode in comment.upper():
return MODE_ALIASES[mode]
return None
def infer_mode_type_from_mode(self, mode):
"""Infer a "mode family" from a mode."""
if mode.upper() in CW_MODES:
return "CW"
elif mode.upper() in PHONE_MODES:
return "PHONE"
elif mode.upper() in DATA_MODES:
return "DATA"
else:
if mode.upper() != "OTHER":
logging.warn("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
return None
def infer_band_from_freq(self, freq):
"""Infer a band from a frequency in Hz"""
for b in BANDS:
if b.start_freq <= freq <= b.end_freq:
return b
return UNKNOWN_BAND
def infer_country_from_callsign(self, call): def infer_country_from_callsign(self, call):
"""Infer a country name from a callsign""" """Infer a country name from a callsign"""
try: try:
# Start with the basic country-files.com-based decoder. # Start with the basic country-files.com-based decoder.
country = self.CALL_INFO_BASIC.get_country_name(call) country = self._call_info_basic.get_country_name(call)
except (KeyError, ValueError) as e: except (KeyError, ValueError):
country = None country = None
# Couldn't get anything from basic call info database, try QRZ.com # Couldn't get anything from basic call info database, try QRZ.com
if not country: if not country:
qrz_data = self.get_qrz_data_for_callsign(call) qrz_data = self._get_qrz_data_for_callsign(call)
if qrz_data and "country" in qrz_data: if qrz_data and "country" in qrz_data:
country = qrz_data["country"] country = qrz_data["country"]
# Couldn't get anything from QRZ.com database, try HamQTH # Couldn't get anything from QRZ.com database, try HamQTH
if not country: if not country:
hamqth_data = self.get_hamqth_data_for_callsign(call) hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "country" in hamqth_data: if hamqth_data and "country" in hamqth_data:
country = hamqth_data["country"] country = hamqth_data["country"]
# Couldn't get anything from HamQTH database, try Clublog data # Couldn't get anything from HamQTH database, try Clublog data
if not country: if not country:
clublog_data = self.get_clublog_xml_data_for_callsign(call) clublog_data = self._get_clublog_xml_data_for_callsign(call)
if clublog_data and "Name" in clublog_data: if clublog_data and "Name" in clublog_data:
country = clublog_data["Name"] country = clublog_data["Name"]
if not country: if not country:
clublog_data = self.get_clublog_api_data_for_callsign(call) clublog_data = self._get_clublog_api_data_for_callsign(call)
if clublog_data and "Name" in clublog_data: if clublog_data and "Name" in clublog_data:
country = clublog_data["Name"] country = clublog_data["Name"]
# Couldn't get anything from Clublog database, try DXCC data # Couldn't get anything from Clublog database, try DXCC data
if not country: if not country:
dxcc_data = self.get_dxcc_data_for_callsign(call) dxcc_data = self._get_dxcc_data_for_callsign(call)
if dxcc_data and "name" in dxcc_data: if dxcc_data and "name" in dxcc_data:
country = dxcc_data["name"] country = dxcc_data["name"]
return country return country
@@ -238,31 +205,31 @@ class LookupHelper:
try: try:
# Start with the basic country-files.com-based decoder. # Start with the basic country-files.com-based decoder.
dxcc = self.CALL_INFO_BASIC.get_adif_id(call) dxcc = self._call_info_basic.get_adif_id(call)
except (KeyError, ValueError) as e: except (KeyError, ValueError):
dxcc = None dxcc = None
# Couldn't get anything from basic call info database, try QRZ.com # Couldn't get anything from basic call info database, try QRZ.com
if not dxcc: if not dxcc:
qrz_data = self.get_qrz_data_for_callsign(call) qrz_data = self._get_qrz_data_for_callsign(call)
if qrz_data and "adif" in qrz_data: if qrz_data and "adif" in qrz_data:
dxcc = qrz_data["adif"] dxcc = qrz_data["adif"]
# Couldn't get anything from QRZ.com database, try HamQTH # Couldn't get anything from QRZ.com database, try HamQTH
if not dxcc: if not dxcc:
hamqth_data = self.get_hamqth_data_for_callsign(call) hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "adif" in hamqth_data: if hamqth_data and "adif" in hamqth_data:
dxcc = hamqth_data["adif"] dxcc = hamqth_data["adif"]
# Couldn't get anything from HamQTH database, try Clublog data # Couldn't get anything from HamQTH database, try Clublog data
if not dxcc: if not dxcc:
clublog_data = self.get_clublog_xml_data_for_callsign(call) clublog_data = self._get_clublog_xml_data_for_callsign(call)
if clublog_data and "DXCC" in clublog_data: if clublog_data and "DXCC" in clublog_data:
dxcc = clublog_data["DXCC"] dxcc = clublog_data["DXCC"]
if not dxcc: if not dxcc:
clublog_data = self.get_clublog_api_data_for_callsign(call) clublog_data = self._get_clublog_api_data_for_callsign(call)
if clublog_data and "DXCC" in clublog_data: if clublog_data and "DXCC" in clublog_data:
dxcc = clublog_data["DXCC"] dxcc = clublog_data["DXCC"]
# Couldn't get anything from Clublog database, try DXCC data # Couldn't get anything from Clublog database, try DXCC data
if not dxcc: if not dxcc:
dxcc_data = self.get_dxcc_data_for_callsign(call) dxcc_data = self._get_dxcc_data_for_callsign(call)
if dxcc_data and "entityCode" in dxcc_data: if dxcc_data and "entityCode" in dxcc_data:
dxcc = dxcc_data["entityCode"] dxcc = dxcc_data["entityCode"]
return dxcc return dxcc
@@ -272,26 +239,26 @@ class LookupHelper:
try: try:
# Start with the basic country-files.com-based decoder. # Start with the basic country-files.com-based decoder.
continent = self.CALL_INFO_BASIC.get_continent(call) continent = self._call_info_basic.get_continent(call)
except (KeyError, ValueError) as e: except (KeyError, ValueError):
continent = None continent = None
# Couldn't get anything from basic call info database, try HamQTH # Couldn't get anything from basic call info database, try HamQTH
if not continent: if not continent:
hamqth_data = self.get_hamqth_data_for_callsign(call) hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "continent" in hamqth_data: if hamqth_data and "continent" in hamqth_data:
country = hamqth_data["continent"] continent = hamqth_data["continent"]
# Couldn't get anything from HamQTH database, try Clublog data # Couldn't get anything from HamQTH database, try Clublog data
if not continent: if not continent:
clublog_data = self.get_clublog_xml_data_for_callsign(call) clublog_data = self._get_clublog_xml_data_for_callsign(call)
if clublog_data and "Continent" in clublog_data: if clublog_data and "Continent" in clublog_data:
continent = clublog_data["Continent"] continent = clublog_data["Continent"]
if not continent: if not continent:
clublog_data = self.get_clublog_api_data_for_callsign(call) clublog_data = self._get_clublog_api_data_for_callsign(call)
if clublog_data and "Continent" in clublog_data: if clublog_data and "Continent" in clublog_data:
continent = clublog_data["Continent"] continent = clublog_data["Continent"]
# Couldn't get anything from Clublog database, try DXCC data # Couldn't get anything from Clublog database, try DXCC data
if not continent: if not continent:
dxcc_data = self.get_dxcc_data_for_callsign(call) dxcc_data = self._get_dxcc_data_for_callsign(call)
# Some DXCCs are in two continents, if so don't use the continent data as we can't be sure # Some DXCCs are in two continents, if so don't use the continent data as we can't be sure
if dxcc_data and "continent" in dxcc_data and len(dxcc_data["continent"]) == 1: if dxcc_data and "continent" in dxcc_data and len(dxcc_data["continent"]) == 1:
continent = dxcc_data["continent"][0] continent = dxcc_data["continent"][0]
@@ -302,31 +269,31 @@ class LookupHelper:
try: try:
# Start with the basic country-files.com-based decoder. # Start with the basic country-files.com-based decoder.
cqz = self.CALL_INFO_BASIC.get_cqz(call) cqz = self._call_info_basic.get_cqz(call)
except (KeyError, ValueError) as e: except (KeyError, ValueError):
cqz = None cqz = None
# Couldn't get anything from basic call info database, try QRZ.com # Couldn't get anything from basic call info database, try QRZ.com
if not cqz: if not cqz:
qrz_data = self.get_qrz_data_for_callsign(call) qrz_data = self._get_qrz_data_for_callsign(call)
if qrz_data and "cqz" in qrz_data: if qrz_data and "cqz" in qrz_data:
cqz = qrz_data["cqz"] cqz = qrz_data["cqz"]
# Couldn't get anything from QRZ.com database, try HamQTH # Couldn't get anything from QRZ.com database, try HamQTH
if not cqz: if not cqz:
hamqth_data = self.get_hamqth_data_for_callsign(call) hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "cq" in hamqth_data: if hamqth_data and "cq" in hamqth_data:
cqz = hamqth_data["cq"] cqz = hamqth_data["cq"]
# Couldn't get anything from HamQTH database, try Clublog data # Couldn't get anything from HamQTH database, try Clublog data
if not cqz: if not cqz:
clublog_data = self.get_clublog_xml_data_for_callsign(call) clublog_data = self._get_clublog_xml_data_for_callsign(call)
if clublog_data and "CQZ" in clublog_data: if clublog_data and "CQZ" in clublog_data:
cqz = clublog_data["CQZ"] cqz = clublog_data["CQZ"]
if not cqz: if not cqz:
clublog_data = self.get_clublog_api_data_for_callsign(call) clublog_data = self._get_clublog_api_data_for_callsign(call)
if clublog_data and "CQZ" in clublog_data: if clublog_data and "CQZ" in clublog_data:
cqz = clublog_data["CQZ"] cqz = clublog_data["CQZ"]
# Couldn't get anything from Clublog database, try DXCC data # Couldn't get anything from Clublog database, try DXCC data
if not cqz: if not cqz:
dxcc_data = self.get_dxcc_data_for_callsign(call) dxcc_data = self._get_dxcc_data_for_callsign(call)
# Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure # Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure
if dxcc_data and "cq" in dxcc_data and len(dxcc_data["cq"]) == 1: if dxcc_data and "cq" in dxcc_data and len(dxcc_data["cq"]) == 1:
cqz = dxcc_data["cq"][0] cqz = dxcc_data["cq"][0]
@@ -337,22 +304,22 @@ class LookupHelper:
try: try:
# Start with the basic country-files.com-based decoder. # Start with the basic country-files.com-based decoder.
ituz = self.CALL_INFO_BASIC.get_ituz(call) ituz = self._call_info_basic.get_ituz(call)
except (KeyError, ValueError) as e: except (KeyError, ValueError):
ituz = None ituz = None
# Couldn't get anything from basic call info database, try QRZ.com # Couldn't get anything from basic call info database, try QRZ.com
if not ituz: if not ituz:
qrz_data = self.get_qrz_data_for_callsign(call) qrz_data = self._get_qrz_data_for_callsign(call)
if qrz_data and "ituz" in qrz_data: if qrz_data and "ituz" in qrz_data:
ituz = qrz_data["ituz"] ituz = qrz_data["ituz"]
# Couldn't get anything from QRZ.com database, try HamQTH # Couldn't get anything from QRZ.com database, try HamQTH
if not ituz: if not ituz:
hamqth_data = self.get_hamqth_data_for_callsign(call) hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "itu" in hamqth_data: if hamqth_data and "itu" in hamqth_data:
ituz = hamqth_data["itu"] ituz = hamqth_data["itu"]
# Couldn't get anything from HamQTH database, Clublog doesn't provide this, so try DXCC data # Couldn't get anything from HamQTH database, Clublog doesn't provide this, so try DXCC data
if not ituz: if not ituz:
dxcc_data = self.get_dxcc_data_for_callsign(call) dxcc_data = self._get_dxcc_data_for_callsign(call)
# Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure # Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure
if dxcc_data and "itu" in dxcc_data and len(dxcc_data["itu"]) == 1: if dxcc_data and "itu" in dxcc_data and len(dxcc_data["itu"]) == 1:
ituz = dxcc_data["itu"] ituz = dxcc_data["itu"]
@@ -361,18 +328,18 @@ class LookupHelper:
def get_flag_for_dxcc(self, dxcc):
    """Get an emoji flag for a given DXCC entity ID, or None if the entity is not in the lookup table."""
    if dxcc in self._dxcc_data:
        return self._dxcc_data[dxcc]["flag"]
    return None
def infer_name_from_callsign_online_lookup(self, call): def infer_name_from_callsign_online_lookup(self, call):
"""Infer an operator name from a callsign (requires QRZ.com/HamQTH)""" """Infer an operator name from a callsign (requires QRZ.com/HamQTH)"""
data = self.get_qrz_data_for_callsign(call) data = self._get_qrz_data_for_callsign(call)
if data and "fname" in data: if data and "fname" in data:
name = data["fname"] name = data["fname"]
if "name" in data: if "name" in data:
name = name + " " + data["name"] name = name + " " + data["name"]
return name return name
data = self.get_hamqth_data_for_callsign(call) data = self._get_hamqth_data_for_callsign(call)
if data and "nick" in data: if data and "nick" in data:
return data["nick"] return data["nick"]
else: else:
@@ -382,12 +349,12 @@ class LookupHelper:
"""Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH) """Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH)
Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage)""" Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage)"""
data = self.get_qrz_data_for_callsign(call) data = self._get_qrz_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and ( if data and "latitude" in data and "longitude" in data and (
float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float( float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(
data["latitude"]) < 89.9: data["latitude"]) < 89.9:
return [float(data["latitude"]), float(data["longitude"])] return [float(data["latitude"]), float(data["longitude"])]
data = self.get_hamqth_data_for_callsign(call) data = self._get_hamqth_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and ( if data and "latitude" in data and "longitude" in data and (
float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float( float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(
data["latitude"]) < 89.9: data["latitude"]) < 89.9:
@@ -399,11 +366,11 @@ class LookupHelper:
"""Infer a grid locator from a callsign (requires QRZ.com/HamQTH). """Infer a grid locator from a callsign (requires QRZ.com/HamQTH).
Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research)""" Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research)"""
data = self.get_qrz_data_for_callsign(call) data = self._get_qrz_data_for_callsign(call)
if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and \ if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and \
data["locator"].upper() != "AA00AA00": data["locator"].upper() != "AA00AA00":
return data["locator"] return data["locator"]
data = self.get_hamqth_data_for_callsign(call) data = self._get_hamqth_data_for_callsign(call)
if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data[ if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data[
"grid"].upper() != "AA00AA00": "grid"].upper() != "AA00AA00":
return data["grid"] return data["grid"]
@@ -413,10 +380,10 @@ class LookupHelper:
def infer_qth_from_callsign_online_lookup(self, call): def infer_qth_from_callsign_online_lookup(self, call):
"""Infer a textual QTH from a callsign (requires QRZ.com/HamQTH)""" """Infer a textual QTH from a callsign (requires QRZ.com/HamQTH)"""
data = self.get_qrz_data_for_callsign(call) data = self._get_qrz_data_for_callsign(call)
if data and "addr2" in data: if data and "addr2" in data:
return data["addr2"] return data["addr2"]
data = self.get_hamqth_data_for_callsign(call) data = self._get_hamqth_data_for_callsign(call)
if data and "qth" in data: if data and "qth" in data:
return data["qth"] return data["qth"]
else: else:
@@ -426,7 +393,7 @@ class LookupHelper:
"""Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate)""" """Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate)"""
try: try:
data = self.CALL_INFO_BASIC.get_lat_long(call) data = self._call_info_basic.get_lat_long(call)
if data and "latitude" in data and "longitude" in data: if data and "latitude" in data and "longitude" in data:
loc = [float(data["latitude"]), float(data["longitude"])] loc = [float(data["latitude"]), float(data["longitude"])]
else: else:
@@ -435,11 +402,11 @@ class LookupHelper:
loc = None loc = None
# Couldn't get anything from basic call info database, try Clublog data # Couldn't get anything from basic call info database, try Clublog data
if not loc: if not loc:
data = self.get_clublog_xml_data_for_callsign(call) data = self._get_clublog_xml_data_for_callsign(call)
if data and "Lat" in data and "Lon" in data: if data and "Lat" in data and "Lon" in data:
loc = [float(data["Lat"]), float(data["Lon"])] loc = [float(data["Lat"]), float(data["Lon"])]
if not loc: if not loc:
data = self.get_clublog_api_data_for_callsign(call) data = self._get_clublog_api_data_for_callsign(call)
if data and "Lat" in data and "Lon" in data: if data and "Lat" in data and "Lon" in data:
loc = [float(data["Lat"]), float(data["Lon"])] loc = [float(data["Lat"]), float(data["Lon"])]
return loc return loc
@@ -455,49 +422,28 @@ class LookupHelper:
logging.debug("Invalid lat/lon received for DXCC") logging.debug("Invalid lat/lon received for DXCC")
return grid return grid
def infer_mode_from_frequency(self, freq): def _get_qrz_data_for_callsign(self, call):
"""Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really."""
try:
khz = freq / 1000.0
mode = freq_to_band(khz)["mode"]
# Some additional common digimode ranges in addition to what the 3rd-party freq_to_band function returns.
# This is mostly here just because freq_to_band is very specific about things like FT8 frequencies, and e.g.
# a spot at 7074.5 kHz will be indicated as LSB, even though it's clearly in the FT8 range. Future updates
# might include other common digimode centres of activity here, but this achieves the main goal of keeping
# large numbers of clearly-FT* spots off the list of people filtering out digimodes.
if (7074 <= khz < 7077) or (10136 <= khz < 10139) or (14074 <= khz < 14077) or (18100 <= khz < 18103) or (
21074 <= khz < 21077) or (24915 <= khz < 24918) or (28074 <= khz < 28077):
mode = "FT8"
if (7047.5 <= khz < 7050.5) or (10140 <= khz < 10143) or (14080 <= khz < 14083) or (
18104 <= khz < 18107) or (21140 <= khz < 21143) or (24919 <= khz < 24922) or (28180 <= khz < 28183):
mode = "FT4"
return mode
except KeyError:
return None
def get_qrz_data_for_callsign(self, call):
"""Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it""" """Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it"""
# Fetch from cache if we can, otherwise fetch from the API and cache it # Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.QRZ_CALLSIGN_DATA_CACHE: if call in self._qrz_callsign_data_cache:
return self.QRZ_CALLSIGN_DATA_CACHE.get(call) return self._qrz_callsign_data_cache.get(call)
elif self.QRZ_AVAILABLE: elif self._qrz_available:
try: try:
data = self.LOOKUP_LIB_QRZ.lookup_callsign(callsign=call) data = self._lookup_lib_qrz.lookup_callsign(callsign=call)
self.QRZ_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._qrz_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# QRZ had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call. # QRZ had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try: try:
data = self.LOOKUP_LIB_QRZ.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call)) data = self._lookup_lib_qrz.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self.QRZ_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._qrz_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# QRZ had no info for the call, that's OK. Cache a None so we don't try to look this up again # QRZ had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.QRZ_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds self._qrz_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None return None
except (Exception): except Exception:
# General exception like a timeout when communicating with QRZ. Return None this time, but don't cache # General exception like a timeout when communicating with QRZ. Return None this time, but don't cache
# that, so we can try again next time. # that, so we can try again next time.
logging.error("Exception when looking up QRZ data") logging.error("Exception when looking up QRZ data")
@@ -505,17 +451,17 @@ class LookupHelper:
else: else:
return None return None
def get_hamqth_data_for_callsign(self, call): def _get_hamqth_data_for_callsign(self, call):
"""Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it""" """Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it"""
# Fetch from cache if we can, otherwise fetch from the API and cache it # Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.HAMQTH_CALLSIGN_DATA_CACHE: if call in self._hamqth_callsign_data_cache:
return self.HAMQTH_CALLSIGN_DATA_CACHE.get(call) return self._hamqth_callsign_data_cache.get(call)
elif self.HAMQTH_AVAILABLE: elif self._hamqth_available:
try: try:
# First we need to log in and get a session token. # First we need to log in and get a session token.
session_data = self.HAMQTH_SESSION_LOOKUP_CACHE.get( session_data = self._hamqth_session_lookup_cache.get(
self.HAMQTH_BASE_URL + "?u=" + urllib.parse.quote_plus(config["hamqth-username"]) + self._hamqth_base_url + "?u=" + urllib.parse.quote_plus(config["hamqth-username"]) +
"&p=" + urllib.parse.quote_plus(config["hamqth-password"]), headers=HTTP_HEADERS).content "&p=" + urllib.parse.quote_plus(config["hamqth-password"]), headers=HTTP_HEADERS).content
dict_data = xmltodict.parse(session_data) dict_data = xmltodict.parse(session_data)
if "session_id" in dict_data["HamQTH"]["session"]: if "session_id" in dict_data["HamQTH"]["session"]:
@@ -524,78 +470,79 @@ class LookupHelper:
# Now look up the actual data. # Now look up the actual data.
try: try:
lookup_data = SEMI_STATIC_URL_DATA_CACHE.get( lookup_data = SEMI_STATIC_URL_DATA_CACHE.get(
self.HAMQTH_BASE_URL + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus( self._hamqth_base_url + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
call) + "&prg=" + HAMQTH_PRG, headers=HTTP_HEADERS).content call) + "&prg=" + HAMQTH_PRG, headers=HTTP_HEADERS).content
data = xmltodict.parse(lookup_data)["HamQTH"]["search"] data = xmltodict.parse(lookup_data)["HamQTH"]["search"]
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._hamqth_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# HamQTH had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call. # HamQTH had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try: try:
lookup_data = SEMI_STATIC_URL_DATA_CACHE.get( lookup_data = SEMI_STATIC_URL_DATA_CACHE.get(
self.HAMQTH_BASE_URL + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus( self._hamqth_base_url + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG, callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG,
headers=HTTP_HEADERS).content headers=HTTP_HEADERS).content
data = xmltodict.parse(lookup_data)["HamQTH"]["search"] data = xmltodict.parse(lookup_data)["HamQTH"]["search"]
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._hamqth_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# HamQTH had no info for the call, that's OK. Cache a None so we don't try to look this up again # HamQTH had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds self._hamqth_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None return None
else: else:
logging.warn("HamQTH login details incorrect, failed to look up with HamQTH.") logging.warning("HamQTH login details incorrect, failed to look up with HamQTH.")
except: except:
logging.error("Exception when looking up HamQTH data") logging.error("Exception when looking up HamQTH data")
return None return None
return None
def get_clublog_api_data_for_callsign(self, call): def _get_clublog_api_data_for_callsign(self, call):
"""Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it""" """Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it"""
# Fetch from cache if we can, otherwise fetch from the API and cache it # Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.CLUBLOG_CALLSIGN_DATA_CACHE: if call in self._clublog_callsign_data_cache:
return self.CLUBLOG_CALLSIGN_DATA_CACHE.get(call) return self._clublog_callsign_data_cache.get(call)
elif self.CLUBLOG_API_AVAILABLE: elif self._clublog_api_available:
try: try:
data = self.LOOKUP_LIB_CLUBLOG_API.lookup_callsign(callsign=call) data = self._lookup_lib_clublog_api.lookup_callsign(callsign=call)
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._clublog_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# Clublog had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call. # Clublog had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try: try:
data = self.LOOKUP_LIB_CLUBLOG_API.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call)) data = self._lookup_lib_clublog_api.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds self._clublog_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again # Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds self._clublog_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None return None
except APIKeyMissingError: except APIKeyMissingError:
# User API key was wrong, warn # User API key was wrong, warn
logging.error("Could not look up via Clublog API, key " + self.CLUBLOG_API_KEY + " was rejected.") logging.error("Could not look up via Clublog API, key " + self._clublog_api_key + " was rejected.")
return None return None
else: else:
return None return None
def get_clublog_xml_data_for_callsign(self, call): def _get_clublog_xml_data_for_callsign(self, call):
"""Utility method to get Clublog XML data from file""" """Utility method to get Clublog XML data from file"""
if self.CLUBLOG_XML_AVAILABLE: if self._clublog_xml_available:
try: try:
data = self.LOOKUP_LIB_CLUBLOG_XML.lookup_callsign(callsign=call) data = self._lookup_lib_clublog_xml.lookup_callsign(callsign=call)
return data return data
except (KeyError, ValueError): except (KeyError, ValueError):
# Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again # Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds self._clublog_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None return None
else: else:
return None return None
def get_dxcc_data_for_callsign(self, call): def _get_dxcc_data_for_callsign(self, call):
"""Utility method to get generic DXCC data from our lookup table, if we can find it""" """Utility method to get generic DXCC data from our lookup table, if we can find it"""
for entry in self.DXCC_DATA.values(): for entry in self._dxcc_data.values():
if entry["_prefixRegexCompiled"].match(call): if entry["_prefixRegexCompiled"].match(call):
return entry return entry
return None return None
@@ -603,9 +550,66 @@ class LookupHelper:
def stop(self):
    """Shutdown method to close down any caches neatly."""
    # Close every disk-backed callsign cache we own.
    for cache in (self._qrz_callsign_data_cache, self._clublog_callsign_data_cache):
        cache.close()
# Singleton object # Singleton object
lookup_helper = LookupHelper() lookup_helper = LookupHelper()
def infer_mode_from_comment(comment):
    """Infer a mode from the comment.

    Checks known mode names first, then mode aliases; returns the matched
    (canonical) mode name, or None if nothing in the comment matches.
    """
    # Hoist the case-fold out of the loops — the original recomputed comment.upper()
    # once per candidate mode/alias.
    comment_upper = comment.upper()
    for mode in ALL_MODES:
        if mode in comment_upper:
            return mode
    # Iterate alias->mode pairs directly rather than keys() plus a second lookup.
    for alias, mode in MODE_ALIASES.items():
        if alias in comment_upper:
            return mode
    return None
def infer_mode_type_from_mode(mode):
    """Infer a "mode family" ("CW", "PHONE" or "DATA") from a mode.

    Returns None for unrecognised modes, logging a warning unless the mode is "OTHER".
    """
    # Hoist the case-fold — the original called mode.upper() up to four times.
    mode_upper = mode.upper()
    if mode_upper in CW_MODES:
        return "CW"
    if mode_upper in PHONE_MODES:
        return "PHONE"
    if mode_upper in DATA_MODES:
        return "DATA"
    if mode_upper != "OTHER":
        logging.warning("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
    return None
def infer_band_from_freq(freq):
    """Infer a band from a frequency in Hz"""
    # First band whose range contains the frequency wins; fall back to the unknown band.
    return next((band for band in BANDS if band.start_freq <= freq <= band.end_freq), UNKNOWN_BAND)
def infer_mode_from_frequency(freq):
    """Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really."""
    try:
        khz = freq / 1000.0
        mode = freq_to_band(khz)["mode"]
        # Some additional common digimode ranges in addition to what the 3rd-party freq_to_band function returns.
        # This is mostly here just because freq_to_band is very specific about things like FT8 frequencies, and e.g.
        # a spot at 7074.5 kHz will be indicated as LSB, even though it's clearly in the FT8 range. Future updates
        # might include other common digimode centres of activity here, but this achieves the main goal of keeping
        # large numbers of clearly-FT* spots off the list of people filtering out digimodes.
        ft8_ranges = ((7074, 7077), (10136, 10139), (14074, 14077), (18100, 18103),
                      (21074, 21077), (24915, 24918), (28074, 28077))
        ft4_ranges = ((7047.5, 7050.5), (10140, 10143), (14080, 14083), (18104, 18107),
                      (21140, 21143), (24919, 24922), (28180, 28183))
        if any(low <= khz < high for low, high in ft8_ranges):
            mode = "FT8"
        if any(low <= khz < high for low, high in ft4_ranges):
            mode = "FT4"
        return mode
    except KeyError:
        return None

View File

@@ -17,19 +17,20 @@ class StatusReporter:
alert_providers): alert_providers):
"""Constructor""" """Constructor"""
self.status_data = status_data self._status_data = status_data
self.run_interval = run_interval self._run_interval = run_interval
self.web_server = web_server self._web_server = web_server
self.cleanup_timer = cleanup_timer self._cleanup_timer = cleanup_timer
self.spots = spots self._spots = spots
self.spot_providers = spot_providers self._spot_providers = spot_providers
self.alerts = alerts self._alerts = alerts
self.alert_providers = alert_providers self._alert_providers = alert_providers
self._thread = None
self._stop_event = Event() self._stop_event = Event()
self.startup_time = datetime.now(pytz.UTC) self._startup_time = datetime.now(pytz.UTC)
self.status_data["software-version"] = SOFTWARE_VERSION self._status_data["software-version"] = SOFTWARE_VERSION
self.status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN self._status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN
def start(self): def start(self):
"""Start the reporter thread""" """Start the reporter thread"""
@@ -47,44 +48,44 @@ class StatusReporter:
while True: while True:
self._report() self._report()
if self._stop_event.wait(timeout=self.run_interval): if self._stop_event.wait(timeout=self._run_interval):
break break
def _report(self): def _report(self):
"""Write status information""" """Write status information"""
self.status_data["uptime"] = (datetime.now(pytz.UTC) - self.startup_time).total_seconds() self._status_data["uptime"] = (datetime.now(pytz.UTC) - self._startup_time).total_seconds()
self.status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3) self._status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3)
self.status_data["num_spots"] = len(self.spots) self._status_data["num_spots"] = len(self._spots)
self.status_data["num_alerts"] = len(self.alerts) self._status_data["num_alerts"] = len(self._alerts)
self.status_data["spot_providers"] = list( self._status_data["spot_providers"] = list(
map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status,
"last_updated": p.last_update_time.replace( "last_updated": p.last_update_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0, tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0,
"last_spot": p.last_spot_time.replace( "last_spot": p.last_spot_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0}, tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0},
self.spot_providers)) self._spot_providers))
self.status_data["alert_providers"] = list( self._status_data["alert_providers"] = list(
map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status,
"last_updated": p.last_update_time.replace( "last_updated": p.last_update_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0}, tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0},
self.alert_providers)) self._alert_providers))
self.status_data["cleanup"] = {"status": self.cleanup_timer.status, self._status_data["cleanup"] = {"status": self._cleanup_timer.status,
"last_ran": self.cleanup_timer.last_cleanup_time.replace( "last_ran": self._cleanup_timer.last_cleanup_time.replace(
tzinfo=pytz.UTC).timestamp() if self.cleanup_timer.last_cleanup_time else 0} tzinfo=pytz.UTC).timestamp() if self._cleanup_timer.last_cleanup_time else 0}
self.status_data["webserver"] = {"status": self.web_server.web_server_metrics["status"], self._status_data["webserver"] = {"status": self._web_server.web_server_metrics["status"],
"last_api_access": self.web_server.web_server_metrics[ "last_api_access": self._web_server.web_server_metrics[
"last_api_access_time"].replace( "last_api_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self.web_server.web_server_metrics[ tzinfo=pytz.UTC).timestamp() if self._web_server.web_server_metrics[
"last_api_access_time"] else 0, "last_api_access_time"] else 0,
"api_access_count": self.web_server.web_server_metrics["api_access_counter"], "api_access_count": self._web_server.web_server_metrics["api_access_counter"],
"last_page_access": self.web_server.web_server_metrics[ "last_page_access": self._web_server.web_server_metrics[
"last_page_access_time"].replace( "last_page_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self.web_server.web_server_metrics[ tzinfo=pytz.UTC).timestamp() if self._web_server.web_server_metrics[
"last_page_access_time"] else 0, "last_page_access_time"] else 0,
"page_access_count": self.web_server.web_server_metrics["page_access_counter"]} "page_access_count": self._web_server.web_server_metrics["page_access_counter"]}
# Update Prometheus metrics # Update Prometheus metrics
memory_use_gauge.set(psutil.Process(os.getpid()).memory_info().rss * 1024) memory_use_gauge.set(psutil.Process(os.getpid()).memory_info().rss * 1024)
spots_gauge.set(len(self.spots)) spots_gauge.set(len(self._spots))
alerts_gauge.set(len(self.alerts)) alerts_gauge.set(len(self._alerts))

View File

@@ -105,7 +105,7 @@ class Alert:
# If the spot itself doesn't have a SIG yet, but we have at least one SIG reference, take that reference's SIG # If the spot itself doesn't have a SIG yet, but we have at least one SIG reference, take that reference's SIG
# and apply it to the whole spot. # and apply it to the whole spot.
if self.sig_refs and len(self.sig_refs) > 0 and not self.sig: if self.sig_refs and len(self.sig_refs) > 0 and self.sig_refs[0] and not self.sig:
self.sig = self.sig_refs[0].sig self.sig = self.sig_refs[0].sig
# DX operator details lookup, using QRZ.com. This should be the last resort compared to taking the data from # DX operator details lookup, using QRZ.com. This should be the last resort compared to taking the data from

View File

@@ -12,7 +12,8 @@ from pyhamtools.locator import locator_to_latlong, latlong_to_locator
from core.config import MAX_SPOT_AGE from core.config import MAX_SPOT_AGE
from core.constants import MODE_ALIASES from core.constants import MODE_ALIASES
from core.geo_utils import lat_lon_to_cq_zone, lat_lon_to_itu_zone from core.geo_utils import lat_lon_to_cq_zone, lat_lon_to_itu_zone
from core.lookup_helper import lookup_helper from core.lookup_helper import lookup_helper, infer_band_from_freq, infer_mode_from_comment, infer_mode_from_frequency, \
infer_mode_type_from_mode
from core.sig_utils import populate_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_for_sig from core.sig_utils import populate_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_for_sig
from data.sig_ref import SIGRef from data.sig_ref import SIGRef
@@ -201,17 +202,17 @@ class Spot:
# Band from frequency # Band from frequency
if self.freq and not self.band: if self.freq and not self.band:
band = lookup_helper.infer_band_from_freq(self.freq) band = infer_band_from_freq(self.freq)
self.band = band.name self.band = band.name
# Mode from comments or bandplan # Mode from comments or bandplan
if self.mode: if self.mode:
self.mode_source = "SPOT" self.mode_source = "SPOT"
if self.comment and not self.mode: if self.comment and not self.mode:
self.mode = lookup_helper.infer_mode_from_comment(self.comment) self.mode = infer_mode_from_comment(self.comment)
self.mode_source = "COMMENT" self.mode_source = "COMMENT"
if self.freq and not self.mode: if self.freq and not self.mode:
self.mode = lookup_helper.infer_mode_from_frequency(self.freq) self.mode = infer_mode_from_frequency(self.freq)
self.mode_source = "BANDPLAN" self.mode_source = "BANDPLAN"
# Normalise mode if necessary. # Normalise mode if necessary.
@@ -220,7 +221,7 @@ class Spot:
# Mode type from mode # Mode type from mode
if self.mode and not self.mode_type: if self.mode and not self.mode_type:
self.mode_type = lookup_helper.infer_mode_type_from_mode(self.mode) self.mode_type = infer_mode_type_from_mode(self.mode)
# If we have a latitude or grid at this point, it can only have been provided by the spot itself # If we have a latitude or grid at this point, it can only have been provided by the spot itself
if self.dx_latitude or self.dx_grid: if self.dx_latitude or self.dx_grid:
@@ -238,7 +239,7 @@ class Spot:
if regex: if regex:
all_comment_ref_matches = re.finditer(r"(^|\W)(" + regex + r")(^|\W)", self.comment, re.IGNORECASE) all_comment_ref_matches = re.finditer(r"(^|\W)(" + regex + r")(^|\W)", self.comment, re.IGNORECASE)
for ref_match in all_comment_ref_matches: for ref_match in all_comment_ref_matches:
self.append_sig_ref_if_missing(SIGRef(id=ref_match.group(2).upper(), sig=sig)) self._append_sig_ref_if_missing(SIGRef(id=ref_match.group(2).upper(), sig=sig))
# See if the comment looks like it contains any SIGs (and optionally SIG references) that we can # See if the comment looks like it contains any SIGs (and optionally SIG references) that we can
# add to the spot. This should catch cluster spot comments like "POTA GB-0001 WWFF GFF-0001" and e.g. POTA # add to the spot. This should catch cluster spot comments like "POTA GB-0001 WWFF GFF-0001" and e.g. POTA
@@ -259,7 +260,7 @@ class Spot:
ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment, ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment,
re.IGNORECASE) re.IGNORECASE)
for ref_match in ref_matches: for ref_match in ref_matches:
self.append_sig_ref_if_missing(SIGRef(id=ref_match.group(3).upper(), sig=found_sig)) self._append_sig_ref_if_missing(SIGRef(id=ref_match.group(3).upper(), sig=found_sig))
# Fetch SIG data. In case a particular API doesn't provide a full set of name, lat, lon & grid for a reference # Fetch SIG data. In case a particular API doesn't provide a full set of name, lat, lon & grid for a reference
# in its initial call, we use this code to populate the rest of the data. This includes working out grid refs # in its initial call, we use this code to populate the rest of the data. This includes working out grid refs
@@ -385,7 +386,7 @@ class Spot:
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)
def append_sig_ref_if_missing(self, new_sig_ref): def _append_sig_ref_if_missing(self, new_sig_ref):
"""Append a sig_ref to the list, so long as it's not already there.""" """Append a sig_ref to the list, so long as it's not already there."""
if not self.sig_refs: if not self.sig_refs:

View File

@@ -8,7 +8,7 @@ import tornado
from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE
from core.constants import UNKNOWN_BAND from core.constants import UNKNOWN_BAND
from core.lookup_helper import lookup_helper from core.lookup_helper import infer_band_from_freq
from core.prometheus_metrics_handler import api_requests_counter from core.prometheus_metrics_handler import api_requests_counter
from core.sig_utils import get_ref_regex_for_sig from core.sig_utils import get_ref_regex_for_sig
from core.utils import serialize_everything from core.utils import serialize_everything
@@ -20,15 +20,15 @@ class APISpotHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/spot (POST)""" """API request handler for /api/v1/spot (POST)"""
def initialize(self, spots, web_server_metrics): def initialize(self, spots, web_server_metrics):
self.spots = spots self._spots = spots
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def post(self): def post(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# Reject if not allowed # Reject if not allowed
@@ -97,7 +97,7 @@ class APISpotHandler(tornado.web.RequestHandler):
return return
# Reject if frequency not in a known band # Reject if frequency not in a known band
if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND: if infer_band_from_freq(spot.freq) == UNKNOWN_BAND:
self.set_status(422) self.set_status(422)
self.write(json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.", self.write(json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.",
default=serialize_everything)) default=serialize_everything))
@@ -130,7 +130,7 @@ class APISpotHandler(tornado.web.RequestHandler):
# infer missing data, and add it to our database. # infer missing data, and add it to our database.
spot.source = "API" spot.source = "API"
spot.infer_missing() spot.infer_missing()
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) self._spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
self.write(json.dumps("OK", default=serialize_everything)) self.write(json.dumps("OK", default=serialize_everything))
self.set_status(201) self.set_status(201)

View File

@@ -18,15 +18,15 @@ class APIAlertsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/alerts""" """API request handler for /api/v1/alerts"""
def initialize(self, alerts, web_server_metrics): def initialize(self, alerts, web_server_metrics):
self.alerts = alerts self._alerts = alerts
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -34,7 +34,7 @@ class APIAlertsHandler(tornado.web.RequestHandler):
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()} query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
# Fetch all alerts matching the query # Fetch all alerts matching the query
data = get_alert_list_with_filters(self.alerts, query_params) data = get_alert_list_with_filters(self._alerts, query_params)
self.write(json.dumps(data, default=serialize_everything)) self.write(json.dumps(data, default=serialize_everything))
self.set_status(200) self.set_status(200)
except ValueError as e: except ValueError as e:
@@ -53,8 +53,8 @@ class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
"""API request handler for /api/v1/alerts/stream""" """API request handler for /api/v1/alerts/stream"""
def initialize(self, sse_alert_queues, web_server_metrics): def initialize(self, sse_alert_queues, web_server_metrics):
self.sse_alert_queues = sse_alert_queues self._sse_alert_queues = sse_alert_queues
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def custom_headers(self): def custom_headers(self):
"""Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data""" """Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data"""
@@ -65,58 +65,58 @@ class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
def open(self): def open(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
# reduce that to just the first entry, and convert bytes to string # reduce that to just the first entry, and convert bytes to string
self.query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()} self._query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
# Create a alert queue and add it to the web server's list. The web server will fill this when alerts arrive # Create a alert queue and add it to the web server's list. The web server will fill this when alerts arrive
self.alert_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE) self._alert_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self.sse_alert_queues.append(self.alert_queue) self._sse_alert_queues.append(self._alert_queue)
# Set up a timed callback to check if anything is in the queue # Set up a timed callback to check if anything is in the queue
self.heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL) self._heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self.heartbeat.start() self._heartbeat.start()
except Exception as e: except Exception as e:
logging.warn("Exception when serving SSE socket", e) logging.warning("Exception when serving SSE socket", e)
def close(self): def close(self):
"""When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it""" """When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it"""
try: try:
if self.alert_queue in self.sse_alert_queues: if self._alert_queue in self._sse_alert_queues:
self.sse_alert_queues.remove(self.alert_queue) self._sse_alert_queues.remove(self._alert_queue)
empty_queue(self.alert_queue) empty_queue(self._alert_queue)
except: except:
pass pass
try: try:
self.heartbeat.stop() self._heartbeat.stop()
except: except:
pass pass
self.alert_queue = None self._alert_queue = None
super().close() super().close()
def _callback(self): def _callback(self):
"""Callback to check if anything has arrived in the queue, and if so send it to the client""" """Callback to check if anything has arrived in the queue, and if so send it to the client"""
try: try:
if self.alert_queue: if self._alert_queue:
while not self.alert_queue.empty(): while not self._alert_queue.empty():
alert = self.alert_queue.get() alert = self._alert_queue.get()
# If the new alert matches our param filters, send it to the client. If not, ignore it. # If the new alert matches our param filters, send it to the client. If not, ignore it.
if alert_allowed_by_query(alert, self.query_params): if alert_allowed_by_query(alert, self._query_params):
self.write_message(msg=json.dumps(alert, default=serialize_everything)) self.write_message(msg=json.dumps(alert, default=serialize_everything))
if self.alert_queue not in self.sse_alert_queues: if self._alert_queue not in self._sse_alert_queues:
logging.error("Web server cleared up a queue of an active connection!") logging.error("Web server cleared up a queue of an active connection!")
self.close() self.close()
except: except:
logging.warn("Exception in SSE callback, connection will be closed.") logging.warning("Exception in SSE callback, connection will be closed.")
self.close() self.close()

View File

@@ -5,7 +5,6 @@ from datetime import datetime
import pytz import pytz
import tornado import tornado
from pyhamtools.locator import locator_to_latlong
from core.constants import SIGS from core.constants import SIGS
from core.geo_utils import lat_lon_for_grid_sw_corner_plus_size, lat_lon_to_cq_zone, lat_lon_to_itu_zone from core.geo_utils import lat_lon_for_grid_sw_corner_plus_size, lat_lon_to_cq_zone, lat_lon_to_itu_zone
@@ -20,14 +19,14 @@ class APILookupCallHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/call""" """API request handler for /api/v1/lookup/call"""
def initialize(self, web_server_metrics): def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -80,14 +79,14 @@ class APILookupSIGRefHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/sigref""" """API request handler for /api/v1/lookup/sigref"""
def initialize(self, web_server_metrics): def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -98,15 +97,15 @@ class APILookupSIGRefHandler(tornado.web.RequestHandler):
# the provided id must match it. # the provided id must match it.
if "sig" in query_params.keys() and "id" in query_params.keys(): if "sig" in query_params.keys() and "id" in query_params.keys():
sig = query_params.get("sig").upper() sig = query_params.get("sig").upper()
id = query_params.get("id").upper() ref_id = query_params.get("id").upper()
if sig in list(map(lambda p: p.name, SIGS)): if sig in list(map(lambda p: p.name, SIGS)):
if not get_ref_regex_for_sig(sig) or re.match(get_ref_regex_for_sig(sig), id): if not get_ref_regex_for_sig(sig) or re.match(get_ref_regex_for_sig(sig), ref_id):
data = populate_sig_ref_info(SIGRef(id=id, sig=sig)) data = populate_sig_ref_info(SIGRef(id=ref_id, sig=sig))
self.write(json.dumps(data, default=serialize_everything)) self.write(json.dumps(data, default=serialize_everything))
else: else:
self.write( self.write(
json.dumps("Error - '" + id + "' does not look like a valid reference ID for " + sig + ".", json.dumps("Error - '" + ref_id + "' does not look like a valid reference ID for " + sig + ".",
default=serialize_everything)) default=serialize_everything))
self.set_status(422) self.set_status(422)
else: else:
@@ -129,14 +128,14 @@ class APILookupGridHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/grid""" """API request handler for /api/v1/lookup/grid"""
def initialize(self, web_server_metrics): def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,

View File

@@ -4,7 +4,7 @@ from datetime import datetime
import pytz import pytz
import tornado import tornado
from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING
from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS
from core.prometheus_metrics_handler import api_requests_counter from core.prometheus_metrics_handler import api_requests_counter
from core.utils import serialize_everything from core.utils import serialize_everything
@@ -14,14 +14,14 @@ class APIOptionsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/options""" """API request handler for /api/v1/options"""
def initialize(self, status_data, web_server_metrics): def initialize(self, status_data, web_server_metrics):
self.status_data = status_data self._status_data = status_data
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
options = {"bands": BANDS, options = {"bands": BANDS,
@@ -30,9 +30,9 @@ class APIOptionsHandler(tornado.web.RequestHandler):
"sigs": SIGS, "sigs": SIGS,
# Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available. # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
"spot_sources": list( "spot_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), map(lambda p: p["name"], filter(lambda p: p["enabled"], self._status_data["spot_providers"]))),
"alert_sources": list( "alert_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), map(lambda p: p["name"], filter(lambda p: p["enabled"], self._status_data["alert_providers"]))),
"continents": CONTINENTS, "continents": CONTINENTS,
"max_spot_age": MAX_SPOT_AGE, "max_spot_age": MAX_SPOT_AGE,
"spot_allowed": ALLOW_SPOTTING} "spot_allowed": ALLOW_SPOTTING}

View File

@@ -18,15 +18,15 @@ class APISpotsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/spots""" """API request handler for /api/v1/spots"""
def initialize(self, spots, web_server_metrics): def initialize(self, spots, web_server_metrics):
self.spots = spots self._spots = spots
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -34,7 +34,7 @@ class APISpotsHandler(tornado.web.RequestHandler):
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()} query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
# Fetch all spots matching the query # Fetch all spots matching the query
data = get_spot_list_with_filters(self.spots, query_params) data = get_spot_list_with_filters(self._spots, query_params)
self.write(json.dumps(data, default=serialize_everything)) self.write(json.dumps(data, default=serialize_everything))
self.set_status(200) self.set_status(200)
except ValueError as e: except ValueError as e:
@@ -53,8 +53,8 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
"""API request handler for /api/v1/spots/stream""" """API request handler for /api/v1/spots/stream"""
def initialize(self, sse_spot_queues, web_server_metrics): def initialize(self, sse_spot_queues, web_server_metrics):
self.sse_spot_queues = sse_spot_queues self._sse_spot_queues = sse_spot_queues
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def custom_headers(self): def custom_headers(self):
"""Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data""" """Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data"""
@@ -67,58 +67,58 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
try: try:
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
# request.arguments contains lists for each param key because technically the client can supply multiple, # request.arguments contains lists for each param key because technically the client can supply multiple,
# reduce that to just the first entry, and convert bytes to string # reduce that to just the first entry, and convert bytes to string
self.query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()} self._query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
# Create a spot queue and add it to the web server's list. The web server will fill this when spots arrive # Create a spot queue and add it to the web server's list. The web server will fill this when spots arrive
self.spot_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE) self._spot_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self.sse_spot_queues.append(self.spot_queue) self._sse_spot_queues.append(self._spot_queue)
# Set up a timed callback to check if anything is in the queue # Set up a timed callback to check if anything is in the queue
self.heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL) self._heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self.heartbeat.start() self._heartbeat.start()
except Exception as e: except Exception as e:
logging.warn("Exception when serving SSE socket", e) logging.warning("Exception when serving SSE socket", e)
def close(self): def close(self):
"""When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it""" """When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it"""
try: try:
if self.spot_queue in self.sse_spot_queues: if self._spot_queue in self._sse_spot_queues:
self.sse_spot_queues.remove(self.spot_queue) self._sse_spot_queues.remove(self._spot_queue)
empty_queue(self.spot_queue) empty_queue(self._spot_queue)
except: except:
pass pass
try: try:
self.heartbeat.stop() self._heartbeat.stop()
except: except:
pass pass
self.spot_queue = None self._spot_queue = None
super().close() super().close()
def _callback(self): def _callback(self):
"""Callback to check if anything has arrived in the queue, and if so send it to the client""" """Callback to check if anything has arrived in the queue, and if so send it to the client"""
try: try:
if self.spot_queue: if self._spot_queue:
while not self.spot_queue.empty(): while not self._spot_queue.empty():
spot = self.spot_queue.get() spot = self._spot_queue.get()
# If the new spot matches our param filters, send it to the client. If not, ignore it. # If the new spot matches our param filters, send it to the client. If not, ignore it.
if spot_allowed_by_query(spot, self.query_params): if spot_allowed_by_query(spot, self._query_params):
self.write_message(msg=json.dumps(spot, default=serialize_everything)) self.write_message(msg=json.dumps(spot, default=serialize_everything))
if self.spot_queue not in self.sse_spot_queues: if self._spot_queue not in self._sse_spot_queues:
logging.error("Web server cleared up a queue of an active connection!") logging.error("Web server cleared up a queue of an active connection!")
self.close() self.close()
except: except:
logging.warn("Exception in SSE callback, connection will be closed.") logging.warning("Exception in SSE callback, connection will be closed.")
self.close() self.close()

View File

@@ -12,17 +12,17 @@ class APIStatusHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/status""" """API request handler for /api/v1/status"""
def initialize(self, status_data, web_server_metrics): def initialize(self, status_data, web_server_metrics):
self.status_data = status_data self._status_data = status_data
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
# Metrics # Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1 self._web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
api_requests_counter.inc() api_requests_counter.inc()
self.write(json.dumps(self.status_data, default=serialize_everything)) self.write(json.dumps(self._status_data, default=serialize_everything))
self.set_status(200) self.set_status(200)
self.set_header("Cache-Control", "no-store") self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json") self.set_header("Content-Type", "application/json")

View File

@@ -12,16 +12,16 @@ class PageTemplateHandler(tornado.web.RequestHandler):
"""Handler for all HTML pages generated from templates""" """Handler for all HTML pages generated from templates"""
def initialize(self, template_name, web_server_metrics): def initialize(self, template_name, web_server_metrics):
self.template_name = template_name self._template_name = template_name
self.web_server_metrics = web_server_metrics self._web_server_metrics = web_server_metrics
def get(self): def get(self):
# Metrics # Metrics
self.web_server_metrics["last_page_access_time"] = datetime.now(pytz.UTC) self._web_server_metrics["last_page_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["page_access_counter"] += 1 self._web_server_metrics["page_access_counter"] += 1
self.web_server_metrics["status"] = "OK" self._web_server_metrics["status"] = "OK"
page_requests_counter.inc() page_requests_counter.inc()
# Load named template, and provide variables used in templates # Load named template, and provide variables used in templates
self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING, self.render(self._template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING,
web_ui_options=WEB_UI_OPTIONS) web_ui_options=WEB_UI_OPTIONS)

View File

@@ -22,13 +22,13 @@ class WebServer:
def __init__(self, spots, alerts, status_data, port): def __init__(self, spots, alerts, status_data, port):
"""Constructor""" """Constructor"""
self.spots = spots self._spots = spots
self.alerts = alerts self._alerts = alerts
self.sse_spot_queues = [] self._sse_spot_queues = []
self.sse_alert_queues = [] self._sse_alert_queues = []
self.status_data = status_data self._status_data = status_data
self.port = port self._port = port
self.shutdown_event = asyncio.Event() self._shutdown_event = asyncio.Event()
self.web_server_metrics = { self.web_server_metrics = {
"last_page_access_time": None, "last_page_access_time": None,
"last_api_access_time": None, "last_api_access_time": None,
@@ -40,33 +40,33 @@ class WebServer:
def start(self): def start(self):
"""Start the web server""" """Start the web server"""
asyncio.run(self.start_inner()) asyncio.run(self._start_inner())
def stop(self): def stop(self):
"""Stop the web server""" """Stop the web server"""
self.shutdown_event.set() self._shutdown_event.set()
async def start_inner(self): async def _start_inner(self):
"""Start method (async). Sets up the Tornado application.""" """Start method (async). Sets up the Tornado application."""
app = tornado.web.Application([ app = tornado.web.Application([
# Routes for API calls # Routes for API calls
(r"/api/v1/spots", APISpotsHandler, {"spots": self.spots, "web_server_metrics": self.web_server_metrics}), (r"/api/v1/spots", APISpotsHandler, {"spots": self._spots, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts", APIAlertsHandler, (r"/api/v1/alerts", APIAlertsHandler,
{"alerts": self.alerts, "web_server_metrics": self.web_server_metrics}), {"alerts": self._alerts, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spots/stream", APISpotsStreamHandler, (r"/api/v1/spots/stream", APISpotsStreamHandler,
{"sse_spot_queues": self.sse_spot_queues, "web_server_metrics": self.web_server_metrics}), {"sse_spot_queues": self._sse_spot_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts/stream", APIAlertsStreamHandler, (r"/api/v1/alerts/stream", APIAlertsStreamHandler,
{"sse_alert_queues": self.sse_alert_queues, "web_server_metrics": self.web_server_metrics}), {"sse_alert_queues": self._sse_alert_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/options", APIOptionsHandler, (r"/api/v1/options", APIOptionsHandler,
{"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), {"status_data": self._status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/status", APIStatusHandler, (r"/api/v1/status", APIStatusHandler,
{"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), {"status_data": self._status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/call", APILookupCallHandler, {"web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/call", APILookupCallHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/sigref", APILookupSIGRefHandler, {"web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/sigref", APILookupSIGRefHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/grid", APILookupGridHandler, {"web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/grid", APILookupGridHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spot", APISpotHandler, {"spots": self.spots, "web_server_metrics": self.web_server_metrics}), (r"/api/v1/spot", APISpotHandler, {"spots": self._spots, "web_server_metrics": self.web_server_metrics}),
# Routes for templated pages # Routes for templated pages
(r"/", PageTemplateHandler, {"template_name": "spots", "web_server_metrics": self.web_server_metrics}), (r"/", PageTemplateHandler, {"template_name": "spots", "web_server_metrics": self.web_server_metrics}),
(r"/map", PageTemplateHandler, {"template_name": "map", "web_server_metrics": self.web_server_metrics}), (r"/map", PageTemplateHandler, {"template_name": "map", "web_server_metrics": self.web_server_metrics}),
@@ -87,14 +87,14 @@ class WebServer:
], ],
template_path=os.path.join(os.path.dirname(__file__), "../templates"), template_path=os.path.join(os.path.dirname(__file__), "../templates"),
debug=False) debug=False)
app.listen(self.port) app.listen(self._port)
await self.shutdown_event.wait() await self._shutdown_event.wait()
def notify_new_spot(self, spot): def notify_new_spot(self, spot):
"""Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are """Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are
awaiting a server-sent message with new spots.""" awaiting a server-sent message with new spots."""
for queue in self.sse_spot_queues: for queue in self._sse_spot_queues:
try: try:
queue.put(spot) queue.put(spot)
except: except:
@@ -106,7 +106,7 @@ class WebServer:
"""Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are """Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are
awaiting a server-sent message with new spots.""" awaiting a server-sent message with new spots."""
for queue in self.sse_alert_queues: for queue in self._sse_alert_queues:
try: try:
queue.put(alert) queue.put(alert)
except: except:
@@ -118,22 +118,22 @@ class WebServer:
"""Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it """Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it
properly for some reason.""" properly for some reason."""
for q in self.sse_spot_queues: for q in self._sse_spot_queues:
try: try:
if q.full(): if q.full():
logging.warn( logging.warning(
"A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.") "A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.")
self.sse_spot_queues.remove(q) self._sse_spot_queues.remove(q)
empty_queue(q) empty_queue(q)
except: except:
# Probably got deleted already on another thread # Probably got deleted already on another thread
pass pass
for q in self.sse_alert_queues: for q in self._sse_alert_queues:
try: try:
if q.full(): if q.full():
logging.warn( logging.warning(
"A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.") "A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.")
self.sse_alert_queues.remove(q) self._sse_alert_queues.remove(q)
empty_queue(q) empty_queue(q)
except: except:
# Probably got deleted already on another thread # Probably got deleted already on another thread

View File

@@ -32,12 +32,12 @@ def shutdown(sig, frame):
logging.info("Stopping program...") logging.info("Stopping program...")
web_server.stop() web_server.stop()
for p in spot_providers: for sp in spot_providers:
if p.enabled: if sp.enabled:
p.stop() sp.stop()
for p in alert_providers: for ap in alert_providers:
if p.enabled: if ap.enabled:
p.stop() ap.stop()
cleanup_timer.stop() cleanup_timer.stop()
lookup_helper.stop() lookup_helper.stop()
spots.close() spots.close()

View File

@@ -15,27 +15,27 @@ class APRSIS(SpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config) super().__init__(provider_config)
self.thread = Thread(target=self.connect) self._thread = Thread(target=self._connect)
self.thread.daemon = True self._thread.daemon = True
self.aprsis = None self._aprsis = None
def start(self): def start(self):
self.thread.start() self._thread.start()
def connect(self): def _connect(self):
self.aprsis = aprslib.IS(SERVER_OWNER_CALLSIGN) self._aprsis = aprslib.IS(SERVER_OWNER_CALLSIGN)
self.status = "Connecting" self.status = "Connecting"
logging.info("APRS-IS connecting...") logging.info("APRS-IS connecting...")
self.aprsis.connect() self._aprsis.connect()
self.aprsis.consumer(self.handle) self._aprsis.consumer(self._handle)
logging.info("APRS-IS connected.") logging.info("APRS-IS connected.")
def stop(self): def stop(self):
self.status = "Shutting down" self.status = "Shutting down"
self.aprsis.close() self._aprsis.close()
self.thread.join() self._thread.join()
def handle(self, data): def _handle(self, data):
# Split SSID in "from" call and store separately # Split SSID in "from" call and store separately
from_parts = data["from"].split("-").upper() from_parts = data["from"].split("-").upper()
dx_call = from_parts[0] dx_call = from_parts[0]
@@ -55,7 +55,7 @@ class APRSIS(SpotProvider):
pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now" pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now"
# Add to our list # Add to our list
self.submit(spot) self._submit(spot)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)

View File

@@ -16,62 +16,62 @@ class DXCluster(SpotProvider):
"""Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config. """Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config.
See config-example.yml for examples.""" See config-example.yml for examples."""
CALLSIGN_PATTERN = "([a-z|0-9|/]+)" _CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
FREQUENCY_PATTERN = "([0-9|.]+)" _FREQUENCY_PATTERN = "([0-9|.]+)"
LINE_PATTERN_EXCLUDE_RBN = re.compile( _LINE_PATTERN_EXCLUDE_RBN = re.compile(
"^DX de " + CALLSIGN_PATTERN + ":\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)", "^DX de " + _CALLSIGN_PATTERN + ":\\s+" + _FREQUENCY_PATTERN + "\\s+" + _CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
re.IGNORECASE) re.IGNORECASE)
LINE_PATTERN_ALLOW_RBN = re.compile( _LINE_PATTERN_ALLOW_RBN = re.compile(
"^DX de " + CALLSIGN_PATTERN + "-?#?:\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)", "^DX de " + _CALLSIGN_PATTERN + "-?#?:\\s+" + _FREQUENCY_PATTERN + "\\s+" + _CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
re.IGNORECASE) re.IGNORECASE)
def __init__(self, provider_config): def __init__(self, provider_config):
"""Constructor requires hostname and port""" """Constructor requires hostname and port"""
super().__init__(provider_config) super().__init__(provider_config)
self.hostname = provider_config["host"] self._hostname = provider_config["host"]
self.port = provider_config["port"] self._port = provider_config["port"]
self.login_prompt = provider_config["login_prompt"] if "login_prompt" in provider_config else "login:" self._login_prompt = provider_config["login_prompt"] if "login_prompt" in provider_config else "login:"
self.login_callsign = provider_config[ self._login_callsign = provider_config[
"login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN "login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN
self.allow_rbn_spots = provider_config["allow_rbn_spots"] if "allow_rbn_spots" in provider_config else False self._allow_rbn_spots = provider_config["allow_rbn_spots"] if "allow_rbn_spots" in provider_config else False
self.spot_line_pattern = self.LINE_PATTERN_ALLOW_RBN if self.allow_rbn_spots else self.LINE_PATTERN_EXCLUDE_RBN self._spot_line_pattern = self._LINE_PATTERN_ALLOW_RBN if self._allow_rbn_spots else self._LINE_PATTERN_EXCLUDE_RBN
self.telnet = None self._telnet = None
self.thread = Thread(target=self.handle) self._thread = Thread(target=self._handle)
self.thread.daemon = True self._thread.daemon = True
self.run = True self._running = True
def start(self): def start(self):
self.thread.start() self._thread.start()
def stop(self): def stop(self):
self.run = False self._running = False
self.telnet.close() self._telnet.close()
self.thread.join() self._thread.join()
def handle(self): def _handle(self):
while self.run: while self._running:
connected = False connected = False
while not connected and self.run: while not connected and self._running:
try: try:
self.status = "Connecting" self.status = "Connecting"
logging.info("DX Cluster " + self.hostname + " connecting...") logging.info("DX Cluster " + self._hostname + " connecting...")
self.telnet = telnetlib3.Telnet(self.hostname, self.port) self._telnet = telnetlib3.Telnet(self._hostname, self._port)
self.telnet.read_until(self.login_prompt.encode("latin-1")) self._telnet.read_until(self._login_prompt.encode("latin-1"))
self.telnet.write((self.login_callsign + "\n").encode("latin-1")) self._telnet.write((self._login_callsign + "\n").encode("latin-1"))
connected = True connected = True
logging.info("DX Cluster " + self.hostname + " connected.") logging.info("DX Cluster " + self._hostname + " connected.")
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").") logging.exception("Exception while connecting to DX Cluster Provider (" + self._hostname + ").")
sleep(5) sleep(5)
self.status = "Waiting for Data" self.status = "Waiting for Data"
while connected and self.run: while connected and self._running:
try: try:
# Check new telnet info against regular expression # Check new telnet info against regular expression
telnet_output = self.telnet.read_until("\n".encode("latin-1")) telnet_output = self._telnet.read_until("\n".encode("latin-1"))
match = self.spot_line_pattern.match(telnet_output.decode("latin-1")) match = self._spot_line_pattern.match(telnet_output.decode("latin-1"))
if match: if match:
spot_time = datetime.strptime(match.group(5), "%H%MZ") spot_time = datetime.strptime(match.group(5), "%H%MZ")
spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC) spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
@@ -83,20 +83,20 @@ class DXCluster(SpotProvider):
time=spot_datetime.timestamp()) time=spot_datetime.timestamp())
# Add to our list # Add to our list
self.submit(spot) self._submit(spot)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Data received from DX Cluster " + self.hostname + ".") logging.debug("Data received from DX Cluster " + self._hostname + ".")
except Exception as e: except Exception:
connected = False connected = False
if self.run: if self._running:
self.status = "Error" self.status = "Error"
logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")") logging.exception("Exception in DX Cluster Provider (" + self._hostname + ")")
sleep(5) sleep(5)
else: else:
logging.info("DX Cluster " + self.hostname + " shutting down...") logging.info("DX Cluster " + self._hostname + " shutting down...")
self.status = "Shutting down" self.status = "Shutting down"
self.status = "Disconnected" self.status = "Disconnected"

View File

@@ -21,7 +21,7 @@ class GMA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json()["RCD"]: for source_spot in http_response.json()["RCD"]:
@@ -77,7 +77,7 @@ class GMA(HTTPSpotProvider):
spot.sig_refs[0].sig = "MOTA" spot.sig_refs[0].sig = "MOTA"
spot.sig = "MOTA" spot.sig = "MOTA"
case _: case _:
logging.warn("GMA spot found with ref type " + ref_info[ logging.warning("GMA spot found with ref type " + ref_info[
"reftype"] + ", developer needs to add support for this!") "reftype"] + ", developer needs to add support for this!")
spot.sig_refs[0].sig = ref_info["reftype"] spot.sig_refs[0].sig = ref_info["reftype"]
spot.sig = ref_info["reftype"] spot.sig = ref_info["reftype"]
@@ -86,6 +86,6 @@ class GMA(HTTPSpotProvider):
# that for us. # that for us.
new_spots.append(spot) new_spots.append(spot)
except: except:
logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot[ logging.warning("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot[
"REF"] + ", ignoring this spot for now") "REF"] + ", ignoring this spot for now")
return new_spots return new_spots

View File

@@ -24,13 +24,13 @@ class HEMA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOT_SEED_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOT_SEED_URL, self.POLL_INTERVAL_SEC)
self.spot_seed = "" self._spot_seed = ""
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
# OK, source data is actually just the spot seed at this point. We'll then go on to fetch real data if we know # OK, source data is actually just the spot seed at this point. We'll then go on to fetch real data if we know
# this has changed. # this has changed.
spot_seed_changed = http_response.text != self.spot_seed spot_seed_changed = http_response.text != self._spot_seed
self.spot_seed = http_response.text self._spot_seed = http_response.text
new_spots = [] new_spots = []
# OK, if the spot seed actually changed, now we make the real request for data. # OK, if the spot seed actually changed, now we make the real request for data.

View File

@@ -15,14 +15,15 @@ class HTTPSpotProvider(SpotProvider):
def __init__(self, provider_config, url, poll_interval): def __init__(self, provider_config, url, poll_interval):
super().__init__(provider_config) super().__init__(provider_config)
self.url = url self._url = url
self.poll_interval = poll_interval self._poll_interval = poll_interval
self._thread = None
self._stop_event = Event() self._stop_event = Event()
def start(self): def start(self):
# Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between # Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between
# subsequent polls, so start() returns immediately and the application can continue starting. # subsequent polls, so start() returns immediately and the application can continue starting.
logging.info("Set up query of " + self.name + " spot API every " + str(self.poll_interval) + " seconds.") logging.info("Set up query of " + self.name + " spot API every " + str(self._poll_interval) + " seconds.")
self._thread = Thread(target=self._run, daemon=True) self._thread = Thread(target=self._run, daemon=True)
self._thread.start() self._thread.start()
@@ -32,30 +33,30 @@ class HTTPSpotProvider(SpotProvider):
def _run(self): def _run(self):
while True: while True:
self._poll() self._poll()
if self._stop_event.wait(timeout=self.poll_interval): if self._stop_event.wait(timeout=self._poll_interval):
break break
def _poll(self): def _poll(self):
try: try:
# Request data from API # Request data from API
logging.debug("Polling " + self.name + " spot API...") logging.debug("Polling " + self.name + " spot API...")
http_response = requests.get(self.url, headers=HTTP_HEADERS) http_response = requests.get(self._url, headers=HTTP_HEADERS)
# Pass off to the subclass for processing # Pass off to the subclass for processing
new_spots = self.http_response_to_spots(http_response) new_spots = self._http_response_to_spots(http_response)
# Submit the new spots for processing. There might not be any spots for the less popular programs. # Submit the new spots for processing. There might not be any spots for the less popular programs.
if new_spots: if new_spots:
self.submit_batch(new_spots) self._submit_batch(new_spots)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Received data from " + self.name + " spot API.") logging.debug("Received data from " + self.name + " spot API.")
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception in HTTP JSON Spot Provider (" + self.name + ")") logging.exception("Exception in HTTP JSON Spot Provider (" + self.name + ")")
self._stop_event.wait(timeout=1) self._stop_event.wait(timeout=1)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
"""Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass """Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass
implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
the API actually provides.""" the API actually provides."""

View File

@@ -14,7 +14,7 @@ class LLOTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json(): for source_spot in http_response.json():

View File

@@ -19,7 +19,7 @@ class ParksNPeaks(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json(): for source_spot in http_response.json():
@@ -50,7 +50,7 @@ class ParksNPeaks(HTTPSpotProvider):
# Log a warning for the developer if PnP gives us an unknown programme we've never seen before # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]: if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]:
logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to add support for this!") logging.warning("PNP spot found with sig " + spot.sig + ", developer needs to add support for this!")
# If this is POTA, SOTA, WWFF or ZLOTA data we already have it through other means, so ignore. Otherwise, # If this is POTA, SOTA, WWFF or ZLOTA data we already have it through other means, so ignore. Otherwise,
# add to the spot list. # add to the spot list.

View File

@@ -16,7 +16,7 @@ class POTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json(): for source_spot in http_response.json():

View File

@@ -16,53 +16,53 @@ class RBN(SpotProvider):
"""Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8 """Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8
(port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor.""" (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor."""
CALLSIGN_PATTERN = "([a-z|0-9|/]+)" _CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
FREQUENCY_PATTERM = "([0-9|.]+)" _FREQUENCY_PATTERM = "([0-9|.]+)"
LINE_PATTERN = re.compile( _LINE_PATTERN = re.compile(
"^DX de " + CALLSIGN_PATTERN + "-.*:\\s+" + FREQUENCY_PATTERM + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)", "^DX de " + _CALLSIGN_PATTERN + "-.*:\\s+" + _FREQUENCY_PATTERM + "\\s+" + _CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
re.IGNORECASE) re.IGNORECASE)
def __init__(self, provider_config): def __init__(self, provider_config):
"""Constructor requires port number.""" """Constructor requires port number."""
super().__init__(provider_config) super().__init__(provider_config)
self.port = provider_config["port"] self._port = provider_config["port"]
self.telnet = None self._telnet = None
self.thread = Thread(target=self.handle) self._thread = Thread(target=self._handle)
self.thread.daemon = True self._thread.daemon = True
self.run = True self._running = True
def start(self): def start(self):
self.thread.start() self._thread.start()
def stop(self): def stop(self):
self.run = False self._running = False
self.telnet.close() self._telnet.close()
self.thread.join() self._thread.join()
def handle(self): def _handle(self):
while self.run: while self._running:
connected = False connected = False
while not connected and self.run: while not connected and self._running:
try: try:
self.status = "Connecting" self.status = "Connecting"
logging.info("RBN port " + str(self.port) + " connecting...") logging.info("RBN port " + str(self._port) + " connecting...")
self.telnet = telnetlib3.Telnet("telnet.reversebeacon.net", self.port) self._telnet = telnetlib3.Telnet("telnet.reversebeacon.net", self._port)
telnet_output = self.telnet.read_until("Please enter your call: ".encode("latin-1")) telnet_output = self._telnet.read_until("Please enter your call: ".encode("latin-1"))
self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("latin-1")) self._telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("latin-1"))
connected = True connected = True
logging.info("RBN port " + str(self.port) + " connected.") logging.info("RBN port " + str(self._port) + " connected.")
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception while connecting to RBN (port " + str(self.port) + ").") logging.exception("Exception while connecting to RBN (port " + str(self._port) + ").")
sleep(5) sleep(5)
self.status = "Waiting for Data" self.status = "Waiting for Data"
while connected and self.run: while connected and self._running:
try: try:
# Check new telnet info against regular expression # Check new telnet info against regular expression
telnet_output = self.telnet.read_until("\n".encode("latin-1")) telnet_output = self._telnet.read_until("\n".encode("latin-1"))
match = self.LINE_PATTERN.match(telnet_output.decode("latin-1")) match = self._LINE_PATTERN.match(telnet_output.decode("latin-1"))
if match: if match:
spot_time = datetime.strptime(match.group(5), "%H%MZ") spot_time = datetime.strptime(match.group(5), "%H%MZ")
spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC) spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
@@ -74,20 +74,20 @@ class RBN(SpotProvider):
time=spot_datetime.timestamp()) time=spot_datetime.timestamp())
# Add to our list # Add to our list
self.submit(spot) self._submit(spot)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Data received from RBN on port " + str(self.port) + ".") logging.debug("Data received from RBN on port " + str(self._port) + ".")
except Exception as e: except Exception:
connected = False connected = False
if self.run: if self._running:
self.status = "Error" self.status = "Error"
logging.exception("Exception in RBN provider (port " + str(self.port) + ")") logging.exception("Exception in RBN provider (port " + str(self._port) + ")")
sleep(5) sleep(5)
else: else:
logging.info("RBN provider (port " + str(self.port) + ") shutting down...") logging.info("RBN provider (port " + str(self._port) + ") shutting down...")
self.status = "Shutting down" self.status = "Shutting down"
self.status = "Disconnected" self.status = "Disconnected"

View File

@@ -22,13 +22,13 @@ class SOTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.EPOCH_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.EPOCH_URL, self.POLL_INTERVAL_SEC)
self.api_epoch = "" self._api_epoch = ""
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
# OK, source data is actually just the epoch at this point. We'll then go on to fetch real data if we know this # OK, source data is actually just the epoch at this point. We'll then go on to fetch real data if we know this
# has changed. # has changed.
epoch_changed = http_response.text != self.api_epoch epoch_changed = http_response.text != self._api_epoch
self.api_epoch = http_response.text self._api_epoch = http_response.text
new_spots = [] new_spots = []
# OK, if the epoch actually changed, now we make the real request for data. # OK, if the epoch actually changed, now we make the real request for data.

View File

@@ -16,21 +16,21 @@ class SpotProvider:
self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC) self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
self.last_spot_time = datetime.min.replace(tzinfo=pytz.UTC) self.last_spot_time = datetime.min.replace(tzinfo=pytz.UTC)
self.status = "Not Started" if self.enabled else "Disabled" self.status = "Not Started" if self.enabled else "Disabled"
self.spots = None self._spots = None
self.web_server = None self._web_server = None
def setup(self, spots, web_server): def setup(self, spots, web_server):
"""Set up the provider, e.g. giving it the spot list to work from""" """Set up the provider, e.g. giving it the spot list to work from"""
self.spots = spots self._spots = spots
self.web_server = web_server self._web_server = web_server
def start(self): def start(self):
"""Start the provider. This should return immediately after spawning threads to access the remote resources""" """Start the provider. This should return immediately after spawning threads to access the remote resources"""
raise NotImplementedError("Subclasses must implement this method") raise NotImplementedError("Subclasses must implement this method")
def submit_batch(self, spots): def _submit_batch(self, spots):
"""Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved """Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have
their infer_missing() method called to complete their data set. This is called by the API-querying their infer_missing() method called to complete their data set. This is called by the API-querying
@@ -38,30 +38,30 @@ class SpotProvider:
# Sort the batch so that earliest ones go in first. This helps keep the ordering correct when spots are fired # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when spots are fired
# off to SSE listeners. # off to SSE listeners.
spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0)) spots = sorted(spots, key=lambda s: (s.time if s and s.time else 0))
for spot in spots: for spot in spots:
if datetime.fromtimestamp(spot.time, pytz.UTC) > self.last_spot_time: if datetime.fromtimestamp(spot.time, pytz.UTC) > self.last_spot_time:
# Fill in any blanks and add to the list # Fill in any blanks and add to the list
spot.infer_missing() spot.infer_missing()
self.add_spot(spot) self._add_spot(spot)
self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC) self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC)
def submit(self, spot): def _submit(self, spot):
"""Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots """Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
passing the check will also have their infer_missing() method called to complete their data set. This is called by passing the check will also have their infer_missing() method called to complete their data set. This is called by
the data streaming subclasses, which can be relied upon not to re-provide old spots.""" the data streaming subclasses, which can be relied upon not to re-provide old spots."""
# Fill in any blanks and add to the list # Fill in any blanks and add to the list
spot.infer_missing() spot.infer_missing()
self.add_spot(spot) self._add_spot(spot)
self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC) self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC)
def add_spot(self, spot): def _add_spot(self, spot):
if not spot.expired(): if not spot.expired():
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) self._spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
# Ping the web server in case we have any SSE connections that need to see this immediately # Ping the web server in case we have any SSE connections that need to see this immediately
if self.web_server: if self._web_server:
self.web_server.notify_new_spot(spot) self._web_server.notify_new_spot(spot)
def stop(self): def stop(self):
"""Stop any threads and prepare for application shutdown""" """Stop any threads and prepare for application shutdown"""

View File

@@ -15,25 +15,25 @@ class SSESpotProvider(SpotProvider):
def __init__(self, provider_config, url): def __init__(self, provider_config, url):
super().__init__(provider_config) super().__init__(provider_config)
self.url = url self._url = url
self.event_source = None self._event_source = None
self.thread = None self._thread = None
self.stopped = False self._stopped = False
self.last_event_id = None self._last_event_id = None
def start(self): def start(self):
logging.info("Set up SSE connection to " + self.name + " spot API.") logging.info("Set up SSE connection to " + self.name + " spot API.")
self.stopped = False self._stopped = False
self.thread = Thread(target=self.run) self._thread = Thread(target=self._run)
self.thread.daemon = True self._thread.daemon = True
self.thread.start() self._thread.start()
def stop(self): def stop(self):
self.stopped = True self._stopped = True
if self.event_source: if self._event_source:
self.event_source.close() self._event_source.close()
if self.thread: if self._thread:
self.thread.join() self._thread.join()
def _on_open(self): def _on_open(self):
self.status = "Waiting for Data" self.status = "Waiting for Data"
@@ -41,38 +41,38 @@ class SSESpotProvider(SpotProvider):
def _on_error(self): def _on_error(self):
self.status = "Connecting" self.status = "Connecting"
def run(self): def _run(self):
while not self.stopped: while not self._stopped:
try: try:
logging.debug("Connecting to " + self.name + " spot API...") logging.debug("Connecting to " + self.name + " spot API...")
self.status = "Connecting" self.status = "Connecting"
with EventSource(self.url, headers=HTTP_HEADERS, latest_event_id=self.last_event_id, timeout=30, with EventSource(self._url, headers=HTTP_HEADERS, latest_event_id=self._last_event_id, timeout=30,
on_open=self._on_open, on_error=self._on_error) as event_source: on_open=self._on_open, on_error=self._on_error) as event_source:
self.event_source = event_source self._event_source = event_source
for event in self.event_source: for event in self._event_source:
if event.type == 'message': if event.type == 'message':
try: try:
self.last_event_id = event.last_event_id self._last_event_id = event.last_event_id
new_spot = self.sse_message_to_spot(event.data) new_spot = self._sse_message_to_spot(event.data)
if new_spot: if new_spot:
self.submit(new_spot) self._submit(new_spot)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Received data from " + self.name + " spot API.") logging.debug("Received data from " + self.name + " spot API.")
except Exception as e: except Exception:
logging.exception( logging.exception(
"Exception processing message from SSE Spot Provider (" + self.name + ")") "Exception processing message from SSE Spot Provider (" + self.name + ")")
except Exception as e: except Exception:
self.status = "Error" self.status = "Error"
logging.exception("Exception in SSE Spot Provider (" + self.name + ")") logging.exception("Exception in SSE Spot Provider (" + self.name + ")")
else: else:
self.status = "Disconnected" self.status = "Disconnected"
sleep(5) # Wait before trying to reconnect sleep(5) # Wait before trying to reconnect
def sse_message_to_spot(self, message_data): def _sse_message_to_spot(self, message_data):
"""Convert an SSE message received from the API into a spot. The whole message data is provided here so the subclass """Convert an SSE message received from the API into a spot. The whole message data is provided here so the subclass
implementations can handle the message as JSON, XML, text, whatever the API actually provides.""" implementations can handle the message as JSON, XML, text, whatever the API actually provides."""

View File

@@ -16,7 +16,7 @@ class UKPacketNet(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
nodes = http_response.json()["nodes"] nodes = http_response.json()["nodes"]

View File

@@ -15,25 +15,25 @@ class WebsocketSpotProvider(SpotProvider):
def __init__(self, provider_config, url): def __init__(self, provider_config, url):
super().__init__(provider_config) super().__init__(provider_config)
self.url = url self._url = url
self.ws = None self._ws = None
self.thread = None self._thread = None
self.stopped = False self._stopped = False
self.last_event_id = None self._last_event_id = None
def start(self): def start(self):
logging.info("Set up websocket connection to " + self.name + " spot API.") logging.info("Set up websocket connection to " + self.name + " spot API.")
self.stopped = False self._stopped = False
self.thread = Thread(target=self.run) self._thread = Thread(target=self._run)
self.thread.daemon = True self._thread.daemon = True
self.thread.start() self._thread.start()
def stop(self): def stop(self):
self.stopped = True self._stopped = True
if self.ws: if self._ws:
self.ws.close() self._ws.close()
if self.thread: if self._thread:
self.thread.join() self._thread.join()
def _on_open(self): def _on_open(self):
self.status = "Waiting for Data" self.status = "Waiting for Data"
@@ -41,25 +41,25 @@ class WebsocketSpotProvider(SpotProvider):
def _on_error(self): def _on_error(self):
self.status = "Connecting" self.status = "Connecting"
def run(self): def _run(self):
while not self.stopped: while not self._stopped:
try: try:
logging.debug("Connecting to " + self.name + " spot API...") logging.debug("Connecting to " + self.name + " spot API...")
self.status = "Connecting" self.status = "Connecting"
self.ws = create_connection(self.url, header=HTTP_HEADERS) self._ws = create_connection(self._url, header=HTTP_HEADERS)
self.status = "Connected" self.status = "Connected"
data = self.ws.recv() data = self._ws.recv()
if data: if data:
try: try:
new_spot = self.ws_message_to_spot(data) new_spot = self._ws_message_to_spot(data)
if new_spot: if new_spot:
self.submit(new_spot) self._submit(new_spot)
self.status = "OK" self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC) self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Received data from " + self.name + " spot API.") logging.debug("Received data from " + self.name + " spot API.")
except Exception as e: except Exception:
logging.exception( logging.exception(
"Exception processing message from Websocket Spot Provider (" + self.name + ")") "Exception processing message from Websocket Spot Provider (" + self.name + ")")
@@ -70,7 +70,7 @@ class WebsocketSpotProvider(SpotProvider):
self.status = "Disconnected" self.status = "Disconnected"
sleep(5) # Wait before trying to reconnect sleep(5) # Wait before trying to reconnect
def ws_message_to_spot(self, bytes): def _ws_message_to_spot(self, b):
"""Convert a WS message received from the API into a spot. The exact message data (in bytes) is provided here so the """Convert a WS message received from the API into a spot. The exact message data (in bytes) is provided here so the
subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides.""" subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides."""

View File

@@ -21,7 +21,7 @@ class WOTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
rss = RSSParser.parse(http_response.content.decode()) rss = RSSParser.parse(http_response.content.decode())
# Iterate through source data # Iterate through source data
@@ -48,6 +48,7 @@ class WOTA(HTTPSpotProvider):
freq_mode = desc_split[0].replace("Frequencies/modes:", "").strip() freq_mode = desc_split[0].replace("Frequencies/modes:", "").strip()
freq_mode_split = re.split(r'[\-\s]+', freq_mode) freq_mode_split = re.split(r'[\-\s]+', freq_mode)
freq_hz = float(freq_mode_split[0]) * 1000000 freq_hz = float(freq_mode_split[0]) * 1000000
mode = None
if len(freq_mode_split) > 1: if len(freq_mode_split) > 1:
mode = freq_mode_split[1].upper() mode = freq_mode_split[1].upper()

View File

@@ -14,7 +14,7 @@ class WWBOTA(SSESpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL) super().__init__(provider_config, self.SPOTS_URL)
def sse_message_to_spot(self, message): def _sse_message_to_spot(self, message):
source_spot = json.loads(message) source_spot = json.loads(message)
# Convert to our spot format. First we unpack references, because WWBOTA spots can have more than one for # Convert to our spot format. First we unpack references, because WWBOTA spots can have more than one for
# n-fer activations. # n-fer activations.

View File

@@ -16,7 +16,7 @@ class WWFF(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json(): for source_spot in http_response.json():

View File

@@ -16,7 +16,7 @@ class WWTOTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
response_fixed = http_response.text.replace("\\/", "/") response_fixed = http_response.text.replace("\\/", "/")
response_json = json.loads(response_fixed) response_json = json.loads(response_fixed)

View File

@@ -36,8 +36,8 @@ class XOTA(WebsocketSpotProvider):
except: except:
logging.exception("Could not look up location data for XOTA source.") logging.exception("Could not look up location data for XOTA source.")
def ws_message_to_spot(self, bytes): def _ws_message_to_spot(self, b):
string = bytes.decode("utf-8") string = b.decode("utf-8")
source_spot = json.loads(string) source_spot = json.loads(string)
ref_id = source_spot["reference"]["title"] ref_id = source_spot["reference"]["title"]
lat = float(self.LOCATION_DATA[ref_id]["lat"]) if ref_id in self.LOCATION_DATA else None lat = float(self.LOCATION_DATA[ref_id]["lat"]) if ref_id in self.LOCATION_DATA else None

View File

@@ -17,7 +17,7 @@ class ZLOTA(HTTPSpotProvider):
def __init__(self, provider_config): def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC) super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)
def http_response_to_spots(self, http_response): def _http_response_to_spots(self, http_response):
new_spots = [] new_spots = []
# Iterate through source data # Iterate through source data
for source_spot in http_response.json(): for source_spot in http_response.json():

View File

@@ -66,7 +66,7 @@
<p>This software is dedicated to the memory of Tom G1PJB, SK, a friend and colleague who sadly passed away around the time I started writing it in Autumn 2025. I was looking forward to showing it to you when it was done.</p> <p>This software is dedicated to the memory of Tom G1PJB, SK, a friend and colleague who sadly passed away around the time I started writing it in Autumn 2025. I was looking forward to showing it to you when it was done.</p>
</div> </div>
<script src="/js/common.js?v=1772202095"></script> <script src="/js/common.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-about").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-about").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -69,8 +69,8 @@
</div> </div>
<script src="/js/common.js?v=1772202095"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/add-spot.js?v=1772202095"></script> <script src="/js/add-spot.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-add-spot").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-add-spot").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -56,8 +56,8 @@
</div> </div>
<script src="/js/common.js?v=1772202096"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/alerts.js?v=1772202096"></script> <script src="/js/alerts.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-alerts").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-alerts").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -62,9 +62,9 @@
<script> <script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %}; let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script> </script>
<script src="/js/common.js?v=1772202095"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/spotsbandsandmap.js?v=1772202095"></script> <script src="/js/spotsbandsandmap.js?v=1772219824"></script>
<script src="/js/bands.js?v=1772202095"></script> <script src="/js/bands.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-bands").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-bands").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -46,10 +46,10 @@
crossorigin="anonymous"></script> crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/tinycolor2@1.6.0/cjs/tinycolor.min.js"></script> <script src="https://cdn.jsdelivr.net/npm/tinycolor2@1.6.0/cjs/tinycolor.min.js"></script>
<script src="https://misc.ianrenton.com/jsutils/utils.js?v=1772202095"></script> <script src="https://misc.ianrenton.com/jsutils/utils.js?v=1772219824"></script>
<script src="https://misc.ianrenton.com/jsutils/storage.js?v=1772202095"></script> <script src="https://misc.ianrenton.com/jsutils/storage.js?v=1772219824"></script>
<script src="https://misc.ianrenton.com/jsutils/ui-ham.js?v=1772202095"></script> <script src="https://misc.ianrenton.com/jsutils/ui-ham.js?v=1772219824"></script>
<script src="https://misc.ianrenton.com/jsutils/geo.js?v=1772202095"></script> <script src="https://misc.ianrenton.com/jsutils/geo.js?v=1772219824"></script>
</head> </head>
<body> <body>

View File

@@ -70,9 +70,9 @@
<script> <script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %}; let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script> </script>
<script src="/js/common.js?v=1772202096"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/spotsbandsandmap.js?v=1772202096"></script> <script src="/js/spotsbandsandmap.js?v=1772219824"></script>
<script src="/js/map.js?v=1772202096"></script> <script src="/js/map.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-map").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-map").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -87,9 +87,9 @@
<script> <script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %}; let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script> </script>
<script src="/js/common.js?v=1772202095"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/spotsbandsandmap.js?v=1772202095"></script> <script src="/js/spotsbandsandmap.js?v=1772219824"></script>
<script src="/js/spots.js?v=1772202095"></script> <script src="/js/spots.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-spots").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-spots").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -3,8 +3,8 @@
<div id="status-container" class="row row-cols-1 row-cols-md-4 g-4 mt-4"></div> <div id="status-container" class="row row-cols-1 row-cols-md-4 g-4 mt-4"></div>
<script src="/js/common.js?v=1772202095"></script> <script src="/js/common.js?v=1772219824"></script>
<script src="/js/status.js?v=1772202095"></script> <script src="/js/status.js?v=1772219824"></script>
<script>$(document).ready(function() { $("#nav-link-status").addClass("active"); }); <!-- highlight active page in nav --></script> <script>$(document).ready(function() { $("#nav-link-status").addClass("active"); }); <!-- highlight active page in nav --></script>
{% end %} {% end %}

View File

@@ -13,13 +13,13 @@ cache = CachedSession("/tmp/cache", expire_after=timedelta(days=30))
data = cache.get("https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json").json() data = cache.get("https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json").json()
for dxcc in data["dxcc"]: for dxcc in data["dxcc"]:
id = dxcc["entityCode"] dxcc_id = dxcc["entityCode"]
flag = dxcc["flag"] flag = dxcc["flag"]
image = Image.new("RGBA", (140, 110), (255, 0, 0, 0)) image = Image.new("RGBA", (140, 110), (255, 0, 0, 0))
draw = ImageDraw.Draw(image) draw = ImageDraw.Draw(image)
draw.text((0, -10), flag, font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109), draw.text((0, -10), flag, font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109),
embedded_color=True) embedded_color=True)
outfile = str(id) + ".png" outfile = str(dxcc_id) + ".png"
image.save(outfile, "PNG") image.save(outfile, "PNG")
image = Image.new("RGBA", (140, 110), (255, 0, 0, 0)) image = Image.new("RGBA", (140, 110), (255, 0, 0, 0))