diff --git a/alertproviders/alert_provider.py b/alertproviders/alert_provider.py index 038ddbb..c4c027e 100644 --- a/alertproviders/alert_provider.py +++ b/alertproviders/alert_provider.py @@ -5,11 +5,12 @@ import pytz from core.config import MAX_ALERT_AGE -# Generic alert provider class. Subclasses of this query the individual APIs for alerts. class AlertProvider: + """Generic alert provider class. Subclasses of this query the individual APIs for alerts.""" - # Constructor def __init__(self, provider_config): + """Constructor""" + self.name = provider_config["name"] self.enabled = provider_config["enabled"] self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC) @@ -17,19 +18,22 @@ class AlertProvider: self.alerts = None self.web_server = None - # Set up the provider, e.g. giving it the alert list to work from def setup(self, alerts, web_server): + """Set up the provider, e.g. giving it the alert list to work from""" + self.alerts = alerts self.web_server = web_server - # Start the provider. This should return immediately after spawning threads to access the remote resources def start(self): + """Start the provider. This should return immediately after spawning threads to access the remote resources""" + raise NotImplementedError("Subclasses must implement this method") - # Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots, - # because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching - # to deal with duplicates. def submit_batch(self, alerts): + """Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots, + because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching + to deal with duplicates.""" + # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when alerts are fired # off to SSE listeners. 
alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0)) @@ -45,6 +49,7 @@ class AlertProvider: if self.web_server: self.web_server.notify_new_alert(alert) - # Stop any threads and prepare for application shutdown def stop(self): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Stop any threads and prepare for application shutdown""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/alertproviders/bota.py b/alertproviders/bota.py index a1e050f..3e19c2d 100644 --- a/alertproviders/bota.py +++ b/alertproviders/bota.py @@ -8,8 +8,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Beaches on the Air class BOTA(HTTPAlertProvider): + """Alert provider for Beaches on the Air""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://www.beachesontheair.com/" @@ -33,7 +34,7 @@ class BOTA(HTTPAlertProvider): # Get the date, dealing with the fact we get no year so have to figure out if it's last year or next year date_text = str(cells[2].find('span').contents[0]).strip() - date_time = datetime.strptime(date_text,"%d %b - %H:%M UTC").replace(tzinfo=pytz.UTC) + date_time = datetime.strptime(date_text, "%d %b - %H:%M UTC").replace(tzinfo=pytz.UTC) date_time = date_time.replace(year=datetime.now(pytz.UTC).year) # If this was more than a day ago, activation is actually next year if date_time < datetime.now(pytz.UTC) - timedelta(days=1): diff --git a/alertproviders/http_alert_provider.py b/alertproviders/http_alert_provider.py index 0f2165b..d621ae0 100644 --- a/alertproviders/http_alert_provider.py +++ b/alertproviders/http_alert_provider.py @@ -9,9 +9,9 @@ from alertproviders.alert_provider import AlertProvider from core.constants import HTTP_HEADERS -# Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code -# duplication. 
Subclasses of this query the individual APIs for data. class HTTPAlertProvider(AlertProvider): + """Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code + duplication. Subclasses of this query the individual APIs for data.""" def __init__(self, provider_config, url, poll_interval): super().__init__(provider_config) @@ -56,8 +56,9 @@ class HTTPAlertProvider(AlertProvider): # Brief pause on error before the next poll, but still respond promptly to stop() self._stop_event.wait(timeout=1) - # Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass - # implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever - # the API actually provides. def http_response_to_alerts(self, http_response): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Convert an HTTP response returned by the API into alert data. 
The whole response is provided here so the subclass + implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever + the API actually provides.""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/alertproviders/ng3k.py b/alertproviders/ng3k.py index 9bb1a72..c89167f 100644 --- a/alertproviders/ng3k.py +++ b/alertproviders/ng3k.py @@ -8,8 +8,9 @@ from alertproviders.http_alert_provider import HTTPAlertProvider from data.alert import Alert -# Alert provider NG3K DXpedition list class NG3K(HTTPAlertProvider): + """Alert provider NG3K DXpedition list""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://www.ng3k.com/adxo.xml" AS_CALL_PATTERN = re.compile("as ([a-z0-9/]+)", re.IGNORECASE) @@ -48,7 +49,8 @@ class NG3K(HTTPAlertProvider): start_timestamp = datetime.strptime(start_year + " " + start_mon + " " + start_day, "%Y %b %d").replace( tzinfo=pytz.UTC).timestamp() - end_timestamp = datetime.strptime(end_year + " " + end_mon + " " + end_day + " 23:59", "%Y %b %d %H:%M").replace( + end_timestamp = datetime.strptime(end_year + " " + end_mon + " " + end_day + " 23:59", + "%Y %b %d %H:%M").replace( tzinfo=pytz.UTC).timestamp() # Sometimes the DX callsign is "real", sometimes you just get a prefix with the real working callsigns being @@ -62,7 +64,7 @@ class NG3K(HTTPAlertProvider): dx_calls = [parts[2].upper()] # "Calls" of TBA, TBC or TBD are not real attempts at Turkish callsigns - dx_calls = list(filter(lambda a: a != "TBA" and a != "TBC" and a != "TBD" , dx_calls)) + dx_calls = list(filter(lambda a: a != "TBA" and a != "TBC" and a != "TBD", dx_calls)) dx_country = parts[1] qsl_info = parts[3] diff --git a/alertproviders/parksnpeaks.py b/alertproviders/parksnpeaks.py index 84cd2a7..215f9fa 100644 --- a/alertproviders/parksnpeaks.py +++ b/alertproviders/parksnpeaks.py @@ -8,8 +8,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Parks n Peaks 
class ParksNPeaks(HTTPAlertProvider): + """Alert provider for Parks n Peaks""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "http://parksnpeaks.org/api/ALERTS/" diff --git a/alertproviders/pota.py b/alertproviders/pota.py index 8d1b8a1..58982da 100644 --- a/alertproviders/pota.py +++ b/alertproviders/pota.py @@ -7,8 +7,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Parks on the Air class POTA(HTTPAlertProvider): + """Alert provider for Parks on the Air""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://api.pota.app/activation" @@ -25,7 +26,8 @@ class POTA(HTTPAlertProvider): dx_calls=[source_alert["activator"].upper()], freqs_modes=source_alert["frequencies"], comment=source_alert["comments"], - sig_refs=[SIGRef(id=source_alert["reference"], sig="POTA", name=source_alert["name"], url="https://pota.app/#/park/" + source_alert["reference"])], + sig_refs=[SIGRef(id=source_alert["reference"], sig="POTA", name=source_alert["name"], + url="https://pota.app/#/park/" + source_alert["reference"])], start_time=datetime.strptime(source_alert["startDate"] + source_alert["startTime"], "%Y-%m-%d%H:%M").replace(tzinfo=pytz.UTC).timestamp(), end_time=datetime.strptime(source_alert["endDate"] + source_alert["endTime"], diff --git a/alertproviders/sota.py b/alertproviders/sota.py index 40371dc..6238c10 100644 --- a/alertproviders/sota.py +++ b/alertproviders/sota.py @@ -7,8 +7,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Summits on the Air class SOTA(HTTPAlertProvider): + """Alert provider for Summits on the Air""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://api-db2.sota.org.uk/api/alerts/365/all/all" @@ -31,7 +32,9 @@ class SOTA(HTTPAlertProvider): dx_names=[source_alert["activatorName"].upper()], freqs_modes=source_alert["frequency"], comment=source_alert["comments"], - sig_refs=[SIGRef(id=source_alert["associationCode"] + "/" + source_alert["summitCode"], sig="SOTA", name=summit_name, 
activation_score=summit_points)], + sig_refs=[ + SIGRef(id=source_alert["associationCode"] + "/" + source_alert["summitCode"], sig="SOTA", + name=summit_name, activation_score=summit_points)], start_time=datetime.strptime(source_alert["dateActivated"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.UTC).timestamp(), is_dxpedition=False) diff --git a/alertproviders/wota.py b/alertproviders/wota.py index 3f40caa..7ff8fbb 100644 --- a/alertproviders/wota.py +++ b/alertproviders/wota.py @@ -8,8 +8,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Wainwrights on the Air class WOTA(HTTPAlertProvider): + """Alert provider for Wainwrights on the Air""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://www.wota.org.uk/alerts_rss.php" RSS_DATE_TIME_FORMAT = "%a, %d %b %Y %H:%M:%S %z" diff --git a/alertproviders/wwff.py b/alertproviders/wwff.py index 5da3f2e..013ece5 100644 --- a/alertproviders/wwff.py +++ b/alertproviders/wwff.py @@ -7,8 +7,9 @@ from data.alert import Alert from data.sig_ref import SIGRef -# Alert provider for Worldwide Flora and Fauna class WWFF(HTTPAlertProvider): + """Alert provider for Worldwide Flora and Fauna""" + POLL_INTERVAL_SEC = 1800 ALERTS_URL = "https://spots.wwff.co/static/agendas.json" diff --git a/core/cache_utils.py b/core/cache_utils.py index 3ed58fe..3093c2f 100644 --- a/core/cache_utils.py +++ b/core/cache_utils.py @@ -7,4 +7,4 @@ from requests_cache import CachedSession # of time has passed. This is used throughout Spothole to cache data that does not change # rapidly. SEMI_STATIC_URL_DATA_CACHE = CachedSession("cache/semi_static_url_data_cache", - expire_after=timedelta(days=30)) \ No newline at end of file + expire_after=timedelta(days=30)) diff --git a/core/cleanup.py b/core/cleanup.py index c916041..6ee8cac 100644 --- a/core/cleanup.py +++ b/core/cleanup.py @@ -6,11 +6,12 @@ from time import sleep import pytz -# Provides a timed cleanup of the spot list. 
class CleanupTimer: + """Provides a timed cleanup of the spot list.""" - # Constructor def __init__(self, spots, alerts, web_server, cleanup_interval): + """Constructor""" + self.spots = spots self.alerts = alerts self.web_server = web_server @@ -20,21 +21,24 @@ class CleanupTimer: self.status = "Starting" self._stop_event = Event() - # Start the cleanup timer def start(self): + """Start the cleanup timer""" + self._thread = Thread(target=self._run, daemon=True) self._thread.start() - # Stop any threads and prepare for application shutdown def stop(self): + """Stop any threads and prepare for application shutdown""" + self._stop_event.set() def _run(self): while not self._stop_event.wait(timeout=self.cleanup_interval): self._cleanup() - # Perform cleanup and reschedule next timer def _cleanup(self): + """Perform cleanup and reschedule next timer""" + try: # Perform cleanup via letting the data expire self.spots.expire() diff --git a/core/config.py b/core/config.py index 17b9dd7..77ca810 100644 --- a/core/config.py +++ b/core/config.py @@ -23,7 +23,7 @@ WEB_UI_OPTIONS = config["web-ui-options"] # For ease of config, each spot provider owns its own config about whether it should be enabled by default in the web UI # but for consistency we provide this to the front-end in web-ui-options because it has no impact outside of the web UI. WEB_UI_OPTIONS["spot-providers-enabled-by-default"] = [p["name"] for p in config["spot-providers"] if p["enabled"] and ( - "enabled-by-default-in-web-ui" not in p or p["enabled-by-default-in-web-ui"] == True)] + "enabled-by-default-in-web-ui" not in p or p["enabled-by-default-in-web-ui"] == True)] # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from # one of our proviers. We set that to also be enabled by default. 
if ALLOW_SPOTTING: diff --git a/core/constants.py b/core/constants.py index c2c5fa8..02719eb 100644 --- a/core/constants.py +++ b/core/constants.py @@ -12,27 +12,27 @@ HAMQTH_PRG = (SOFTWARE_NAME + " v" + SOFTWARE_VERSION + " operated by " + SERVER # Special Interest Groups SIGS = [ - SIG(name="POTA", description="Parks on the Air", ref_regex=r"[A-Z]{2}\-\d{4,5}"), - SIG(name="SOTA", description="Summits on the Air", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"), - SIG(name="WWFF", description="World Wide Flora & Fauna", ref_regex=r"[A-Z0-9]{1,3}FF\-\d{4}"), - SIG(name="GMA", description="Global Mountain Activity", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"), - SIG(name="WWBOTA", description="Worldwide Bunkers on the Air", ref_regex=r"B\/[A-Z0-9]{1,3}\-\d{3,4}"), - SIG(name="HEMA", description="HuMPs Excluding Marilyns Award", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{3}\-\d{3}"), - SIG(name="IOTA", description="Islands on the Air", ref_regex=r"[A-Z]{2}\-\d{3}"), - SIG(name="MOTA", description="Mills on the Air", ref_regex=r"X\d{4-6}"), - SIG(name="ARLHS", description="Amateur Radio Lighthouse Society", ref_regex=r"[A-Z]{3}\-\d{3,4}"), - SIG(name="ILLW", description="International Lighthouse & Lightship Weekend", ref_regex=r"[A-Z]{2}\d{4}"), - SIG(name="SIOTA", description="Silos on the Air", ref_regex=r"[A-Z]{2}\-[A-Z]{3}\d"), - SIG(name="WCA", description="World Castles Award", ref_regex=r"[A-Z0-9]{1,3}\-\d{5}"), - SIG(name="ZLOTA", description="New Zealand on the Air", ref_regex=r"ZL[A-Z]/[A-Z]{2}\-\d{3,4}"), - SIG(name="WOTA", description="Wainwrights on the Air", ref_regex=r"[A-Z]{3}-[0-9]{2}"), - SIG(name="BOTA", description="Beaches on the Air"), - SIG(name="KRMNPA", description="Keith Roget Memorial National Parks Award"), - SIG(name="LLOTA", description="Lagos y Lagunas on the Air", ref_regex=r"[A-Z]{2}\-\d{4}"), - SIG(name="WWTOTA", description="Towers on the Air", ref_regex=r"[A-Z]{2}R\-\d{4}"), - SIG(name="WAB", description="Worked All Britain", 
ref_regex=r"[A-Z]{1,2}[0-9]{2}"), - SIG(name="WAI", description="Worked All Ireland", ref_regex=r"[A-Z][0-9]{2}"), - SIG(name="TOTA", description="Toilets on the Air", ref_regex=r"T\-[0-9]{2}") + SIG(name="POTA", description="Parks on the Air", ref_regex=r"[A-Z]{2}\-\d{4,5}"), + SIG(name="SOTA", description="Summits on the Air", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"), + SIG(name="WWFF", description="World Wide Flora & Fauna", ref_regex=r"[A-Z0-9]{1,3}FF\-\d{4}"), + SIG(name="GMA", description="Global Mountain Activity", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"), + SIG(name="WWBOTA", description="Worldwide Bunkers on the Air", ref_regex=r"B\/[A-Z0-9]{1,3}\-\d{3,4}"), + SIG(name="HEMA", description="HuMPs Excluding Marilyns Award", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{3}\-\d{3}"), + SIG(name="IOTA", description="Islands on the Air", ref_regex=r"[A-Z]{2}\-\d{3}"), + SIG(name="MOTA", description="Mills on the Air", ref_regex=r"X\d{4-6}"), + SIG(name="ARLHS", description="Amateur Radio Lighthouse Society", ref_regex=r"[A-Z]{3}\-\d{3,4}"), + SIG(name="ILLW", description="International Lighthouse & Lightship Weekend", ref_regex=r"[A-Z]{2}\d{4}"), + SIG(name="SIOTA", description="Silos on the Air", ref_regex=r"[A-Z]{2}\-[A-Z]{3}\d"), + SIG(name="WCA", description="World Castles Award", ref_regex=r"[A-Z0-9]{1,3}\-\d{5}"), + SIG(name="ZLOTA", description="New Zealand on the Air", ref_regex=r"ZL[A-Z]/[A-Z]{2}\-\d{3,4}"), + SIG(name="WOTA", description="Wainwrights on the Air", ref_regex=r"[A-Z]{3}-[0-9]{2}"), + SIG(name="BOTA", description="Beaches on the Air"), + SIG(name="KRMNPA", description="Keith Roget Memorial National Parks Award"), + SIG(name="LLOTA", description="Lagos y Lagunas on the Air", ref_regex=r"[A-Z]{2}\-\d{4}"), + SIG(name="WWTOTA", description="Towers on the Air", ref_regex=r"[A-Z]{2}R\-\d{4}"), + SIG(name="WAB", description="Worked All Britain", ref_regex=r"[A-Z]{1,2}[0-9]{2}"), + SIG(name="WAI", description="Worked All Ireland", 
ref_regex=r"[A-Z][0-9]{2}"), + SIG(name="TOTA", description="Toilets on the Air", ref_regex=r"T\-[0-9]{2}") ] # Modes. Note "DIGI" and "DIGITAL" are also supported but are normalised into "DATA". diff --git a/core/geo_utils.py b/core/geo_utils.py index 88a4661..6322715 100644 --- a/core/geo_utils.py +++ b/core/geo_utils.py @@ -18,8 +18,10 @@ for idx in cq_zone_data.index: for idx in itu_zone_data.index: prepare(itu_zone_data.at[idx, 'geometry']) -# Finds out which CQ zone a lat/lon point is in. + def lat_lon_to_cq_zone(lat, lon): + """Finds out which CQ zone a lat/lon point is in.""" + lon = ((lon + 180) % 360) - 180 for index, row in cq_zone_data.iterrows(): polygon = Polygon(row["geometry"]) @@ -38,8 +40,9 @@ def lat_lon_to_cq_zone(lat, lon): return None -# Finds out which ITU zone a lat/lon point is in. def lat_lon_to_itu_zone(lat, lon): + """Finds out which ITU zone a lat/lon point is in.""" + lon = ((lon + 180) % 360) - 180 for index, row in itu_zone_data.iterrows(): polygon = Polygon(row["geometry"]) @@ -58,9 +61,10 @@ def lat_lon_to_itu_zone(lat, lon): return None -# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the centre point of the square. -# Returns None if the grid format is invalid. def lat_lon_for_grid_centre(grid): + """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the centre point of the square. + Returns None if the grid format is invalid.""" + lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid) if lat is not None and lon is not None and lat_cell_size is not None and lon_cell_size is not None: return [lat + lat_cell_size / 2.0, lon + lon_cell_size / 2.0] @@ -68,18 +72,21 @@ def lat_lon_for_grid_centre(grid): return None -# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the southwest corner of the square. -# Returns None if the grid format is invalid. 
def lat_lon_for_grid_sw_corner(grid): + """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the southwest corner of the square. + Returns None if the grid format is invalid.""" + lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid) if lat is not None and lon is not None: return [lat, lon] else: return None -# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the northeast corner of the square. -# Returns None if the grid format is invalid. + def lat_lon_for_grid_ne_corner(grid): + """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the northeast corner of the square. + Returns None if the grid format is invalid.""" + lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid) if lat is not None and lon is not None and lat_cell_size is not None and lon_cell_size is not None: return [lat + lat_cell_size, lon + lon_cell_size] @@ -87,11 +94,12 @@ def lat_lon_for_grid_ne_corner(grid): return None -# Convert a Maidenhead grid reference of arbitrary precision to lat/long, including in the result the size of the -# lowest grid square. This is a utility method used by the main methods that return the centre, southwest, and -# northeast coordinates of a grid square. -# The return type is always a tuple of size 4. The elements in it are None if the grid format is invalid. def lat_lon_for_grid_sw_corner_plus_size(grid): + """Convert a Maidenhead grid reference of arbitrary precision to lat/long, including in the result the size of the + lowest grid square. This is a utility method used by the main methods that return the centre, southwest, and + northeast coordinates of a grid square. + The return type is always a tuple of size 4. The elements in it are None if the grid format is invalid.""" + # Make sure we are in upper case so our maths works. 
Case is arbitrary for Maidenhead references grid = grid.upper() @@ -157,8 +165,9 @@ def lat_lon_for_grid_sw_corner_plus_size(grid): return lat, lon, lat_cell_size, lon_cell_size -# Convert a Worked All Britain or Worked All Ireland reference to a lat/lon point. def wab_wai_square_to_lat_lon(ref): + """Convert a Worked All Britain or Worked All Ireland reference to a lat/lon point.""" + # First check we have a valid grid square, and based on what it looks like, use either the Ordnance Survey, Irish, # or UTM grid systems to perform the conversion. if re.match(r"^[HNOST][ABCDEFGHJKLMNOPQRSTUVWXYZ][0-9]{2}$", ref): @@ -172,8 +181,9 @@ def wab_wai_square_to_lat_lon(ref): return None -# Get a lat/lon point for the centre of an Ordnance Survey grid square def os_grid_square_to_lat_lon(ref): + """Get a lat/lon point for the centre of an Ordnance Survey grid square""" + # Convert the letters into multipliers for the 500km squares and 100km squares offset_500km_multiplier = ord(ref[0]) - 65 offset_100km_multiplier = ord(ref[1]) - 65 @@ -202,8 +212,9 @@ def os_grid_square_to_lat_lon(ref): return lat, lon -# Get a lat/lon point for the centre of an Irish Grid square. 
def irish_grid_square_to_lat_lon(ref): + """Get a lat/lon point for the centre of an Irish Grid square.""" + # Convert the letters into multipliers for the 100km squares offset_100km_multiplier = ord(ref[0]) - 65 @@ -229,8 +240,9 @@ def irish_grid_square_to_lat_lon(ref): return lat, lon -# Get a lat/lon point for the centre of a UTM grid square (supports only squares WA & WV for the Channel Islands, nothing else implemented) def utm_grid_square_to_lat_lon(ref): + """Get a lat/lon point for the centre of a UTM grid square (supports only squares WA & WV for the Channel Islands, nothing else implemented)""" + # Take the numeric parts of the grid square and multiply by 10000 to get metres from the corner of the letter-based grid square easting = int(ref[2]) * 10000 northing = int(ref[3]) * 10000 diff --git a/core/lookup_helper.py b/core/lookup_helper.py index 7840c09..cd76403 100644 --- a/core/lookup_helper.py +++ b/core/lookup_helper.py @@ -19,13 +19,14 @@ from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODE HTTP_HEADERS, HAMQTH_PRG, MODE_ALIASES -# Singleton class that provides lookup functionality. class LookupHelper: + """Singleton class that provides lookup functionality.""" - # Create the lookup helper. Note that nothing actually happens until the start() method is called, and that all - # lookup methods will fail if start() has not yet been called. This therefore needs starting before any spot or - # alert handlers are created. def __init__(self): + """Create the lookup helper. Note that nothing actually happens until the start() method is called, and that all + lookup methods will fail if start() has not yet been called. 
This therefore needs starting before any spot or + alert handlers are created.""" + self.CLUBLOG_CALLSIGN_DATA_CACHE = None self.LOOKUP_LIB_CLUBLOG_XML = None self.CLUBLOG_XML_AVAILABLE = None @@ -105,11 +106,12 @@ class LookupHelper: for dxcc in self.DXCC_DATA.values(): dxcc["_prefixRegexCompiled"] = re.compile(dxcc["prefixRegex"]) - # Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use - # this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can - # catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the - # requests_cache library to prevent re-downloading too quickly if the software keeps restarting. def download_country_files_cty_plist(self): + """Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use + this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can + catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the + requests_cache library to prevent re-downloading too quickly if the software keeps restarting.""" + try: logging.info("Downloading Country-files.com cty.plist...") response = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist", @@ -124,12 +126,14 @@ class LookupHelper: logging.error("Exception when downloading Clublog cty.xml", e) return False - # Download the dxcc.json file on first startup. 
def download_dxcc_json(self): + """Download the dxcc.json file on first startup.""" + try: logging.info("Downloading dxcc.json...") - response = SEMI_STATIC_URL_DATA_CACHE.get("https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json", - headers=HTTP_HEADERS).text + response = SEMI_STATIC_URL_DATA_CACHE.get( + "https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json", + headers=HTTP_HEADERS).text with open(self.DXCC_JSON_DOWNLOAD_LOCATION, "w") as f: f.write(response) @@ -140,9 +144,10 @@ class LookupHelper: logging.error("Exception when downloading dxcc.json", e) return False - # Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the - # database live if possible. def download_clublog_ctyxml(self): + """Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the + database live if possible.""" + try: logging.info("Downloading Clublog cty.xml.gz...") response = self.CLUBLOG_CTY_XML_CACHE.get("https://cdn.clublog.org/cty.php?api=" + self.CLUBLOG_API_KEY, @@ -161,8 +166,9 @@ class LookupHelper: logging.error("Exception when downloading Clublog cty.xml", e) return False - # Infer a mode from the comment def infer_mode_from_comment(self, comment): + """Infer a mode from the comment""" + for mode in ALL_MODES: if mode in comment.upper(): return mode @@ -171,8 +177,9 @@ class LookupHelper: return MODE_ALIASES[mode] return None - # Infer a "mode family" from a mode. def infer_mode_type_from_mode(self, mode): + """Infer a "mode family" from a mode.""" + if mode.upper() in CW_MODES: return "CW" elif mode.upper() in PHONE_MODES: @@ -184,15 +191,17 @@ class LookupHelper: logging.warn("Found an unrecognised mode: " + mode + ". 
Developer should categorise this.") return None - # Infer a band from a frequency in Hz def infer_band_from_freq(self, freq): + """Infer a band from a frequency in Hz""" + for b in BANDS: if b.start_freq <= freq <= b.end_freq: return b return UNKNOWN_BAND - # Infer a country name from a callsign def infer_country_from_callsign(self, call): + """Infer a country name from a callsign""" + try: # Start with the basic country-files.com-based decoder. country = self.CALL_INFO_BASIC.get_country_name(call) @@ -224,8 +233,9 @@ class LookupHelper: country = dxcc_data["name"] return country - # Infer a DXCC ID from a callsign def infer_dxcc_id_from_callsign(self, call): + """Infer a DXCC ID from a callsign""" + try: # Start with the basic country-files.com-based decoder. dxcc = self.CALL_INFO_BASIC.get_adif_id(call) @@ -257,8 +267,9 @@ class LookupHelper: dxcc = dxcc_data["entityCode"] return dxcc - # Infer a continent shortcode from a callsign def infer_continent_from_callsign(self, call): + """Infer a continent shortcode from a callsign""" + try: # Start with the basic country-files.com-based decoder. continent = self.CALL_INFO_BASIC.get_continent(call) @@ -286,8 +297,9 @@ class LookupHelper: continent = dxcc_data["continent"][0] return continent - # Infer a CQ zone from a callsign def infer_cq_zone_from_callsign(self, call): + """Infer a CQ zone from a callsign""" + try: # Start with the basic country-files.com-based decoder. cqz = self.CALL_INFO_BASIC.get_cqz(call) @@ -320,8 +332,9 @@ class LookupHelper: cqz = dxcc_data["cq"][0] return cqz - # Infer a ITU zone from a callsign def infer_itu_zone_from_callsign(self, call): + """Infer a ITU zone from a callsign""" + try: # Start with the basic country-files.com-based decoder. 
ituz = self.CALL_INFO_BASIC.get_ituz(call) @@ -345,12 +358,14 @@ class LookupHelper: ituz = dxcc_data["itu"] return ituz - # Get an emoji flag for a given DXCC entity ID def get_flag_for_dxcc(self, dxcc): + """Get an emoji flag for a given DXCC entity ID""" + return self.DXCC_DATA[dxcc]["flag"] if dxcc in self.DXCC_DATA else None - # Infer an operator name from a callsign (requires QRZ.com/HamQTH) def infer_name_from_callsign_online_lookup(self, call): + """Infer an operator name from a callsign (requires QRZ.com/HamQTH)""" + data = self.get_qrz_data_for_callsign(call) if data and "fname" in data: name = data["fname"] @@ -363,32 +378,41 @@ class LookupHelper: else: return None - # Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH) - # Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage) def infer_latlon_from_callsign_online_lookup(self, call): + """Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH) + Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage)""" + data = self.get_qrz_data_for_callsign(call) - if data and "latitude" in data and "longitude" in data and (float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(data["latitude"]) < 89.9: + if data and "latitude" in data and "longitude" in data and ( + float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float( + data["latitude"]) < 89.9: return [float(data["latitude"]), float(data["longitude"])] data = self.get_hamqth_data_for_callsign(call) - if data and "latitude" in data and "longitude" in data and (float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(data["latitude"]) < 89.9: + if data and "latitude" in data and "longitude" in data and ( + float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float( + data["latitude"]) < 89.9: return [float(data["latitude"]), 
float(data["longitude"])] else: return None - # Infer a grid locator from a callsign (requires QRZ.com/HamQTH). - # Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research) def infer_grid_from_callsign_online_lookup(self, call): + """Infer a grid locator from a callsign (requires QRZ.com/HamQTH). + Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research)""" + data = self.get_qrz_data_for_callsign(call) - if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and data["locator"].upper() != "AA00AA00": + if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and \ + data["locator"].upper() != "AA00AA00": return data["locator"] data = self.get_hamqth_data_for_callsign(call) - if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data["grid"].upper() != "AA00AA00": + if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data[ + "grid"].upper() != "AA00AA00": return data["grid"] else: return None - # Infer a textual QTH from a callsign (requires QRZ.com/HamQTH) def infer_qth_from_callsign_online_lookup(self, call): + """Infer a textual QTH from a callsign (requires QRZ.com/HamQTH)""" + data = self.get_qrz_data_for_callsign(call) if data and "addr2" in data: return data["addr2"] @@ -398,8 +422,9 @@ class LookupHelper: else: return None - # Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate) def infer_latlon_from_callsign_dxcc(self, call): + """Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate)""" + try: data = self.CALL_INFO_BASIC.get_lat_long(call) if data and "latitude" in data and "longitude" in data: @@ -419,8 +444,9 @@ class LookupHelper: loc = [float(data["Lat"]), float(data["Lon"])] return loc - # Infer a grid 
locator from a callsign (using DXCC, probably very inaccurate) def infer_grid_from_callsign_dxcc(self, call): + """Infer a grid locator from a callsign (using DXCC, probably very inaccurate)""" + latlon = self.infer_latlon_from_callsign_dxcc(call) grid = None try: @@ -429,8 +455,9 @@ class LookupHelper: logging.debug("Invalid lat/lon received for DXCC") return grid - # Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really. def infer_mode_from_frequency(self, freq): + """Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really.""" + try: khz = freq / 1000.0 mode = freq_to_band(khz)["mode"] @@ -449,8 +476,9 @@ class LookupHelper: except KeyError: return None - # Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it def get_qrz_data_for_callsign(self, call): + """Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it""" + # Fetch from cache if we can, otherwise fetch from the API and cache it if call in self.QRZ_CALLSIGN_DATA_CACHE: return self.QRZ_CALLSIGN_DATA_CACHE.get(call) @@ -477,8 +505,9 @@ class LookupHelper: else: return None - # Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it def get_hamqth_data_for_callsign(self, call): + """Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it""" + # Fetch from cache if we can, otherwise fetch from the API and cache it if call in self.HAMQTH_CALLSIGN_DATA_CACHE: return self.HAMQTH_CALLSIGN_DATA_CACHE.get(call) @@ -505,7 +534,8 @@ class LookupHelper: try: lookup_data = SEMI_STATIC_URL_DATA_CACHE.get( self.HAMQTH_BASE_URL + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus( - callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG, headers=HTTP_HEADERS).content + callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG, + headers=HTTP_HEADERS).content data = 
xmltodict.parse(lookup_data)["HamQTH"]["search"] self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds return data @@ -520,8 +550,9 @@ class LookupHelper: logging.error("Exception when looking up HamQTH data") return None - # Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it def get_clublog_api_data_for_callsign(self, call): + """Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it""" + # Fetch from cache if we can, otherwise fetch from the API and cache it if call in self.CLUBLOG_CALLSIGN_DATA_CACHE: return self.CLUBLOG_CALLSIGN_DATA_CACHE.get(call) @@ -547,8 +578,9 @@ class LookupHelper: else: return None - # Utility method to get Clublog XML data from file def get_clublog_xml_data_for_callsign(self, call): + """Utility method to get Clublog XML data from file""" + if self.CLUBLOG_XML_AVAILABLE: try: data = self.LOOKUP_LIB_CLUBLOG_XML.lookup_callsign(callsign=call) @@ -560,15 +592,17 @@ class LookupHelper: else: return None - # Utility method to get generic DXCC data from our lookup table, if we can find it def get_dxcc_data_for_callsign(self, call): + """Utility method to get generic DXCC data from our lookup table, if we can find it""" + for entry in self.DXCC_DATA.values(): if entry["_prefixRegexCompiled"].match(call): return entry return None - # Shutdown method to close down any caches neatly. 
def stop(self): + """Shutdown method to close down any caches neatly.""" + self.QRZ_CALLSIGN_DATA_CACHE.close() self.CLUBLOG_CALLSIGN_DATA_CACHE.close() diff --git a/core/prometheus_metrics_handler.py b/core/prometheus_metrics_handler.py index 6cbc823..6914b1f 100644 --- a/core/prometheus_metrics_handler.py +++ b/core/prometheus_metrics_handler.py @@ -31,6 +31,7 @@ memory_use_gauge = Gauge( ) -# Get a Prometheus metrics response for the web server def get_metrics(): + """Get a Prometheus metrics response for the web server""" + return generate_latest(registry) diff --git a/core/sig_utils.py b/core/sig_utils.py index e62433c..7e02303 100644 --- a/core/sig_utils.py +++ b/core/sig_utils.py @@ -8,18 +8,20 @@ from core.constants import SIGS, HTTP_HEADERS from core.geo_utils import wab_wai_square_to_lat_lon -# Utility function to get the regex string for a SIG reference for a named SIG. If no match is found, None will be returned. def get_ref_regex_for_sig(sig): + """Utility function to get the regex string for a SIG reference for a named SIG. If no match is found, None will be returned.""" + for s in SIGS: if s.name.upper() == sig.upper(): return s.ref_regex return None -# Look up details of a SIG reference (e.g. POTA park) such as name, lat/lon, and grid. Takes in a sig_ref object which -# must at minimum have a "sig" and an "id". The rest of the object will be populated and returned. -# Note there is currently no support for KRMNPA location lookup, see issue #61. def populate_sig_ref_info(sig_ref): + """Look up details of a SIG reference (e.g. POTA park) such as name, lat/lon, and grid. Takes in a sig_ref object which + must at minimum have a "sig" and an "id". The rest of the object will be populated and returned. 
+ Note there is currently no support for KRMNPA location lookup, see issue #61.""" + if sig_ref.sig is None or sig_ref.id is None: logging.warning("Failed to look up sig_ref info, sig or id were not set.") @@ -67,7 +69,7 @@ def populate_sig_ref_info(sig_ref): sig_ref.longitude = data["longitude"] if "longitude" in data else None elif sig.upper() == "WWFF": wwff_csv_data = SEMI_STATIC_URL_DATA_CACHE.get("https://wwff.co/wwff-data/wwff_directory.csv", - headers=HTTP_HEADERS) + headers=HTTP_HEADERS) wwff_dr = csv.DictReader(wwff_csv_data.content.decode().splitlines()) for row in wwff_dr: if row["reference"] == ref_id: @@ -75,7 +77,8 @@ def populate_sig_ref_info(sig_ref): sig_ref.url = "https://wwff.co/directory/?showRef=" + ref_id sig_ref.grid = row["iaruLocator"] if "iaruLocator" in row and row["iaruLocator"] != "-" else None sig_ref.latitude = float(row["latitude"]) if "latitude" in row and row["latitude"] != "-" else None - sig_ref.longitude = float(row["longitude"]) if "longitude" in row and row["longitude"] != "-" else None + sig_ref.longitude = float(row["longitude"]) if "longitude" in row and row[ + "longitude"] != "-" else None break elif sig.upper() == "SIOTA": siota_csv_data = SEMI_STATIC_URL_DATA_CACHE.get("https://www.silosontheair.com/data/silos.csv", @@ -124,7 +127,8 @@ def populate_sig_ref_info(sig_ref): sig_ref.name = sig_ref.id sig_ref.url = "https://www.beachesontheair.com/beaches/" + sig_ref.name.lower().replace(" ", "-") elif sig.upper() == "LLOTA": - data = SEMI_STATIC_URL_DATA_CACHE.get("https://llota.app/api/public/references", headers=HTTP_HEADERS).json() + data = SEMI_STATIC_URL_DATA_CACHE.get("https://llota.app/api/public/references", + headers=HTTP_HEADERS).json() if data: for ref in data: if ref["reference_code"] == ref_id: diff --git a/core/status_reporter.py b/core/status_reporter.py index a664d00..9cfefa7 100644 --- a/core/status_reporter.py +++ b/core/status_reporter.py @@ -10,12 +10,13 @@ from core.constants import SOFTWARE_VERSION 
from core.prometheus_metrics_handler import memory_use_gauge, spots_gauge, alerts_gauge -# Provides a timed update of the application's status data. class StatusReporter: + """Provides a timed update of the application's status data.""" - # Constructor def __init__(self, status_data, run_interval, web_server, cleanup_timer, spots, spot_providers, alerts, alert_providers): + """Constructor""" + self.status_data = status_data self.run_interval = run_interval self.web_server = web_server @@ -30,24 +31,28 @@ class StatusReporter: self.status_data["software-version"] = SOFTWARE_VERSION self.status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN - # Start the reporter thread def start(self): + """Start the reporter thread""" + self._thread = Thread(target=self._run, daemon=True) self._thread.start() - # Stop any threads and prepare for application shutdown def stop(self): + """Stop any threads and prepare for application shutdown""" + self._stop_event.set() - # Thread entry point: report immediately on startup, then on each interval until stopped def _run(self): + """Thread entry point: report immediately on startup, then on each interval until stopped""" + while True: self._report() if self._stop_event.wait(timeout=self.run_interval): break - # Write status information def _report(self): + """Write status information""" + self.status_data["uptime"] = (datetime.now(pytz.UTC) - self.startup_time).total_seconds() self.status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3) self.status_data["num_spots"] = len(self.spots) @@ -57,7 +62,8 @@ class StatusReporter: "last_updated": p.last_update_time.replace( tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0, "last_spot": p.last_spot_time.replace( - tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0}, self.spot_providers)) + tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0}, + self.spot_providers)) 
self.status_data["alert_providers"] = list( map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, "last_updated": p.last_update_time.replace( @@ -81,4 +87,4 @@ class StatusReporter: # Update Prometheus metrics memory_use_gauge.set(psutil.Process(os.getpid()).memory_info().rss * 1024) spots_gauge.set(len(self.spots)) - alerts_gauge.set(len(self.alerts)) \ No newline at end of file + alerts_gauge.set(len(self.alerts)) diff --git a/core/utils.py b/core/utils.py index 504784d..d8c6c6d 100644 --- a/core/utils.py +++ b/core/utils.py @@ -1,14 +1,15 @@ -# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things. -# Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need -# to receive spots without complex handling. def serialize_everything(obj): + """Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things. + Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need + to receive spots without complex handling.""" return obj.__dict__ -# Empty a queue def empty_queue(q): + """Empty a queue""" + while not q.empty(): try: q.get_nowait() except: - break \ No newline at end of file + break diff --git a/data/alert.py b/data/alert.py index 0868907..e5fd079 100644 --- a/data/alert.py +++ b/data/alert.py @@ -10,9 +10,10 @@ from core.lookup_helper import lookup_helper from core.sig_utils import populate_sig_ref_info -# Data class that defines an alert. @dataclass class Alert: + """Data class that defines an alert.""" + # Unique identifier for the alert id: str = None # Callsigns of the operators that has been alerted @@ -60,8 +61,9 @@ class Alert: # The ID the source gave it, if any. 
source_id: str = None - # Infer missing parameters where possible def infer_missing(self): + """Infer missing parameters where possible""" + # If we somehow don't have a start time, set it to zero so it sorts off the bottom of any list but # clients can still reliably parse it as a number. if not self.start_time: @@ -122,14 +124,16 @@ class Alert: self_copy.received_time_iso = "" self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest() - # JSON serialise def to_json(self): + """JSON serialise""" + return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) - # Decide if this alert has expired (in which case it should not be added to the system in the first place, and not - # returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as - # either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours - # ago. If it somehow doesn't have a start_time either, it is considered to be expired. def expired(self): + """Decide if this alert has expired (in which case it should not be added to the system in the first place, and not + returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as + either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours + ago. If it somehow doesn't have a start_time either, it is considered to be expired.""" + return not self.start_time or (self.end_time and self.end_time < datetime.now(pytz.UTC).timestamp()) or ( not self.end_time and self.start_time < (datetime.now(pytz.UTC) - timedelta(hours=3)).timestamp()) diff --git a/data/band.py b/data/band.py index 56dd099..4d6b50c 100644 --- a/data/band.py +++ b/data/band.py @@ -1,11 +1,13 @@ from dataclasses import dataclass -# Data class that defines a band. 
+ @dataclass class Band: + """Data class that defines a band.""" + # Band name name: str # Start frequency, in Hz start_freq: float # Stop frequency, in Hz - end_freq: float \ No newline at end of file + end_freq: float diff --git a/data/sig.py b/data/sig.py index 64688c5..7ec81c6 100644 --- a/data/sig.py +++ b/data/sig.py @@ -1,11 +1,13 @@ from dataclasses import dataclass -# Data class that defines a Special Interest Group. + @dataclass class SIG: + """Data class that defines a Special Interest Group.""" + # SIG name, e.g. "POTA" name: str # Description, e.g. "Parks on the Air" description: str # Regex matcher for references, e.g. for POTA r"[A-Z]{2}\-\d+". - ref_regex: str = None \ No newline at end of file + ref_regex: str = None diff --git a/data/sig_ref.py b/data/sig_ref.py index 76fa0b7..71fb33d 100644 --- a/data/sig_ref.py +++ b/data/sig_ref.py @@ -1,9 +1,11 @@ from dataclasses import dataclass -# Data class that defines a Special Interest Group "info" or reference. As well as the basic reference ID we include a -# name and a lookup URL. + @dataclass class SIGRef: + """Data class that defines a Special Interest Group "info" or reference. As well as the basic reference ID we include a + name and a lookup URL.""" + # Reference ID, e.g. "GB-0001". id: str # SIG that this reference is in, e.g. "POTA". @@ -19,4 +21,4 @@ class SIGRef: # Maidenhead grid reference of the reference, if known. grid: str = None # Activation score. SOTA only - activation_score: int = None \ No newline at end of file + activation_score: int = None diff --git a/data/spot.py b/data/spot.py index 7715ffa..6a6b73c 100644 --- a/data/spot.py +++ b/data/spot.py @@ -17,9 +17,10 @@ from core.sig_utils import populate_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_f from data.sig_ref import SIGRef -# Data class that defines a spot. 
@dataclass class Spot: + """Data class that defines a spot.""" + # Unique identifier for the spot id: str = None @@ -129,8 +130,9 @@ class Spot: # The ID the source gave it, if any. source_id: str = None - # Infer missing parameters where possible def infer_missing(self): + """Infer missing parameters where possible""" + # If we somehow don't have a spot time, set it to zero so it sorts off the bottom of any list but # clients can still reliably parse it as a number. if not self.time: @@ -186,7 +188,8 @@ class Spot: # Spotter country, continent, zones etc. from callsign. # DE call with no digits, or APRS servers starting "T2" are not things we can look up location for - if self.de_call and any(char.isdigit() for char in self.de_call) and not (self.de_call.startswith("T2") and self.source == "APRS-IS"): + if self.de_call and any(char.isdigit() for char in self.de_call) and not ( + self.de_call.startswith("T2") and self.source == "APRS-IS"): if not self.de_country: self.de_country = lookup_helper.infer_country_from_callsign(self.de_call) if not self.de_continent: @@ -253,7 +256,8 @@ class Spot: # If so, add that to the sig_refs list for this spot. ref_regex = get_ref_regex_for_sig(found_sig) if ref_regex: - ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment, re.IGNORECASE) + ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment, + re.IGNORECASE) for ref_match in ref_matches: self.append_sig_ref_if_missing(SIGRef(id=ref_match.group(3).upper(), sig=found_sig)) @@ -343,12 +347,13 @@ class Spot: # DX Location is "good" if it is from a spot, or from QRZ if the callsign doesn't contain a slash, so the operator # is likely at home. 
self.dx_location_good = self.dx_latitude and self.dx_longitude and ( - self.dx_location_source == "SPOT" or self.dx_location_source == "SIG REF LOOKUP" - or self.dx_location_source == "WAB/WAI GRID" - or (self.dx_location_source == "HOME QTH" and not "/" in self.dx_call)) + self.dx_location_source == "SPOT" or self.dx_location_source == "SIG REF LOOKUP" + or self.dx_location_source == "WAB/WAI GRID" + or (self.dx_location_source == "HOME QTH" and not "/" in self.dx_call)) # DE with no digits and APRS servers starting "T2" are not things we can look up location for - if self.de_call and any(char.isdigit() for char in self.de_call) and not (self.de_call.startswith("T2") and self.source == "APRS-IS"): + if self.de_call and any(char.isdigit() for char in self.de_call) and not ( + self.de_call.startswith("T2") and self.source == "APRS-IS"): # DE operator position lookup, using QRZ.com. if not self.de_latitude: latlon = lookup_helper.infer_latlon_from_callsign_online_lookup(self.de_call) @@ -375,12 +380,14 @@ class Spot: self_copy.received_time_iso = "" self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest() - # JSON sspoterialise def to_json(self): + """JSON serialise""" + return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) - # Append a sig_ref to the list, so long as it's not already there. def append_sig_ref_if_missing(self, new_sig_ref): + """Append a sig_ref to the list, so long as it's not already there.""" + if not self.sig_refs: self.sig_refs = [] new_sig_ref.id = new_sig_ref.id.strip().upper() @@ -392,9 +399,10 @@ class Spot: return self.sig_refs.append(new_sig_ref) - # Decide if this spot has expired (in which case it should not be added to the system in the first place, and not - # returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as - # either having a time further ago than the server's MAX_SPOT_AGE. 
If it somehow doesn't have a time either, it is - # considered to be expired. def expired(self): - return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp() \ No newline at end of file + """Decide if this spot has expired (in which case it should not be added to the system in the first place, and not + returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as + either having a time further ago than the server's MAX_SPOT_AGE. If it somehow doesn't have a time either, it is + considered to be expired.""" + + return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp() diff --git a/server/handlers/api/addspot.py b/server/handlers/api/addspot.py index b37b916..024b208 100644 --- a/server/handlers/api/addspot.py +++ b/server/handlers/api/addspot.py @@ -16,8 +16,9 @@ from data.sig_ref import SIGRef from data.spot import Spot -# API request handler for /api/v1/spot (POST) class APISpotHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/spot (POST)""" + def initialize(self, spots, web_server_metrics): self.spots = spots self.web_server_metrics = web_server_metrics @@ -34,15 +35,17 @@ class APISpotHandler(tornado.web.RequestHandler): if not ALLOW_SPOTTING: self.set_status(401) self.write(json.dumps("Error - this server does not allow new spots to be added via the API.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return # Reject if format not json - if 'Content-Type' not in self.request.headers or self.request.headers.get('Content-Type') != "application/json": + if 'Content-Type' not in self.request.headers or self.request.headers.get( + 'Content-Type') != "application/json": self.set_status(415) - self.write(json.dumps("Error - request Content-Type must be application/json", 
default=serialize_everything)) + self.write( + json.dumps("Error - request Content-Type must be application/json", default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return @@ -72,7 +75,7 @@ class APISpotHandler(tornado.web.RequestHandler): if not spot.time or not spot.dx_call or not spot.freq or not spot.de_call: self.set_status(422) self.write(json.dumps("Error - 'time', 'dx_call', 'freq' and 'de_call' must be provided as a minimum.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return @@ -81,14 +84,14 @@ class APISpotHandler(tornado.web.RequestHandler): if not re.match(r"^[A-Za-z0-9/\-]*$", spot.dx_call): self.set_status(422) self.write(json.dumps("Error - '" + spot.dx_call + "' does not look like a valid callsign.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return if not re.match(r"^[A-Za-z0-9/\-]*$", spot.de_call): self.set_status(422) self.write(json.dumps("Error - '" + spot.de_call + "' does not look like a valid callsign.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return @@ -97,7 +100,7 @@ class APISpotHandler(tornado.web.RequestHandler): if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND: self.set_status(422) self.write(json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return @@ -108,7 +111,7 @@ class APISpotHandler(tornado.web.RequestHandler): spot.dx_grid.upper()): self.set_status(422) 
self.write(json.dumps("Error - '" + spot.dx_grid + "' does not look like a valid Maidenhead grid.", - default=serialize_everything)) + default=serialize_everything)) self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") return diff --git a/server/handlers/api/alerts.py b/server/handlers/api/alerts.py index 34307cd..765392a 100644 --- a/server/handlers/api/alerts.py +++ b/server/handlers/api/alerts.py @@ -14,8 +14,9 @@ SSE_HANDLER_MAX_QUEUE_SIZE = 100 SSE_HANDLER_QUEUE_CHECK_INTERVAL = 5000 -# API request handler for /api/v1/alerts class APIAlertsHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/alerts""" + def initialize(self, alerts, web_server_metrics): self.alerts = alerts self.web_server_metrics = web_server_metrics @@ -47,14 +48,17 @@ class APIAlertsHandler(tornado.web.RequestHandler): self.set_header("Cache-Control", "no-store") self.set_header("Content-Type", "application/json") -# API request handler for /api/v1/alerts/stream + class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler): + """API request handler for /api/v1/alerts/stream""" + def initialize(self, sse_alert_queues, web_server_metrics): self.sse_alert_queues = sse_alert_queues self.web_server_metrics = web_server_metrics - # Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data def custom_headers(self): + """Custom headers to avoid e.g. 
nginx reverse proxy from buffering SSE data""" + return {"Cache-Control": "no-store", "X-Accel-Buffering": "no"} @@ -81,8 +85,9 @@ class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler): except Exception as e: logging.warn("Exception when serving SSE socket", e) - # When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it def close(self): + """When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it""" + try: if self.alert_queue in self.sse_alert_queues: self.sse_alert_queues.remove(self.alert_queue) @@ -96,8 +101,9 @@ class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler): self.alert_queue = None super().close() - # Callback to check if anything has arrived in the queue, and if so send it to the client def _callback(self): + """Callback to check if anything has arrived in the queue, and if so send it to the client""" + try: if self.alert_queue: while not self.alert_queue.empty(): @@ -114,11 +120,10 @@ class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler): self.close() - - -# Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in -# the main "alerts" GET call. def get_alert_list_with_filters(all_alerts, query): + """Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in + the main "alerts" GET call.""" + # Create a shallow copy of the alert list ordered by start time, then filter the list to reduce it only to alerts # that match the filter parameters in the query string. Finally, apply a limit to the number of alerts returned. # The list of query string filters is defined in the API docs. 
@@ -134,9 +139,11 @@ def get_alert_list_with_filters(all_alerts, query): alerts = alerts[:int(query.get("limit"))] return alerts -# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list -# of query parameters and their function is defined in the API docs. + def alert_allowed_by_query(alert, query): + """Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list + of query parameters and their function is defined in the API docs.""" + for k in query.keys(): match k: case "received_since": diff --git a/server/handlers/api/lookups.py b/server/handlers/api/lookups.py index 05a2f39..275133d 100644 --- a/server/handlers/api/lookups.py +++ b/server/handlers/api/lookups.py @@ -16,8 +16,9 @@ from data.sig_ref import SIGRef from data.spot import Spot -# API request handler for /api/v1/lookup/call class APILookupCallHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/lookup/call""" + def initialize(self, web_server_metrics): self.web_server_metrics = web_server_metrics @@ -75,8 +76,9 @@ class APILookupCallHandler(tornado.web.RequestHandler): self.set_header("Content-Type", "application/json") -# API request handler for /api/v1/lookup/sigref class APILookupSIGRefHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/lookup/sigref""" + def initialize(self, web_server_metrics): self.web_server_metrics = web_server_metrics @@ -123,9 +125,9 @@ class APILookupSIGRefHandler(tornado.web.RequestHandler): self.set_header("Content-Type", "application/json") - -# API request handler for /api/v1/lookup/grid class APILookupGridHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/lookup/grid""" + def initialize(self, web_server_metrics): self.web_server_metrics = web_server_metrics @@ -152,17 +154,17 @@ class APILookupGridHandler(tornado.web.RequestHandler): center_itu_zone = lat_lon_to_itu_zone(center_lat, 
center_lon) response = { - "center" : { + "center": { "latitude": center_lat, "longitude": center_lon, "cq_zone": center_cq_zone, "itu_zone": center_itu_zone }, - "southwest" : { + "southwest": { "latitude": lat, "longitude": lon, }, - "northeast" : { + "northeast": { "latitude": lat + lat_cell_size, "longitude": lon + lon_cell_size, }} diff --git a/server/handlers/api/options.py b/server/handlers/api/options.py index cbbe97a..78d68a6 100644 --- a/server/handlers/api/options.py +++ b/server/handlers/api/options.py @@ -10,8 +10,9 @@ from core.prometheus_metrics_handler import api_requests_counter from core.utils import serialize_everything -# API request handler for /api/v1/options class APIOptionsHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/options""" + def initialize(self, status_data, web_server_metrics): self.status_data = status_data self.web_server_metrics = web_server_metrics diff --git a/server/handlers/api/spots.py b/server/handlers/api/spots.py index c75320c..945a33b 100644 --- a/server/handlers/api/spots.py +++ b/server/handlers/api/spots.py @@ -14,8 +14,9 @@ SSE_HANDLER_MAX_QUEUE_SIZE = 1000 SSE_HANDLER_QUEUE_CHECK_INTERVAL = 5000 -# API request handler for /api/v1/spots class APISpotsHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/spots""" + def initialize(self, spots, web_server_metrics): self.spots = spots self.web_server_metrics = web_server_metrics @@ -48,19 +49,22 @@ class APISpotsHandler(tornado.web.RequestHandler): self.set_header("Content-Type", "application/json") -# API request handler for /api/v1/spots/stream class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler): + """API request handler for /api/v1/spots/stream""" + def initialize(self, sse_spot_queues, web_server_metrics): self.sse_spot_queues = sse_spot_queues self.web_server_metrics = web_server_metrics - # Custom headers to avoid e.g. 
nginx reverse proxy from buffering SSE data def custom_headers(self): + """Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data""" + return {"Cache-Control": "no-store", "X-Accel-Buffering": "no"} - # Called once on the client opening a connection, set things up def open(self): + """Called once on the client opening a connection, set things up""" + try: # Metrics self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC) @@ -83,8 +87,9 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler): except Exception as e: logging.warn("Exception when serving SSE socket", e) - # When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it def close(self): + """When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it""" + try: if self.spot_queue in self.sse_spot_queues: self.sse_spot_queues.remove(self.spot_queue) @@ -98,8 +103,9 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler): self.spot_queue = None super().close() - # Callback to check if anything has arrived in the queue, and if so send it to the client def _callback(self): + """Callback to check if anything has arrived in the queue, and if so send it to the client""" + try: if self.spot_queue: while not self.spot_queue.empty(): @@ -116,10 +122,10 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler): self.close() - -# Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in -# the main "spots" GET call. def get_spot_list_with_filters(all_spots, query): + """Utility method to apply filters to the overall spot list and return only a subset. 
Enables query parameters in + the main "spots" GET call.""" + # Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots # that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned. # The list of query string filters is defined in the API docs. @@ -142,22 +148,24 @@ def get_spot_list_with_filters(all_spots, query): # duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't # work well for the other views. if "dedupe" in query.keys(): - dedupe = query.get("dedupe").upper() == "TRUE" - if dedupe: - spots_temp = [] - already_seen = [] - for s in spots: - call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "") - if call_plus_ssid not in already_seen: - spots_temp.append(s) - already_seen.append(call_plus_ssid) - spots = spots_temp + dedupe = query.get("dedupe").upper() == "TRUE" + if dedupe: + spots_temp = [] + already_seen = [] + for s in spots: + call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "") + if call_plus_ssid not in already_seen: + spots_temp.append(s) + already_seen.append(call_plus_ssid) + spots = spots_temp return spots -# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list -# of query parameters and their function is defined in the API docs. + def spot_allowed_by_query(spot, query): + """Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. 
The list + of query parameters and their function is defined in the API docs.""" + for k in query.keys(): match k: case "since": @@ -240,4 +248,4 @@ def spot_allowed_by_query(spot, query): needs_good_location = query.get(k).upper() == "TRUE" if needs_good_location and not spot.dx_location_good: return False - return True \ No newline at end of file + return True diff --git a/server/handlers/api/status.py b/server/handlers/api/status.py index 39808a4..6f2aebc 100644 --- a/server/handlers/api/status.py +++ b/server/handlers/api/status.py @@ -8,8 +8,9 @@ from core.prometheus_metrics_handler import api_requests_counter from core.utils import serialize_everything -# API request handler for /api/v1/status class APIStatusHandler(tornado.web.RequestHandler): + """API request handler for /api/v1/status""" + def initialize(self, status_data, web_server_metrics): self.status_data = status_data self.web_server_metrics = web_server_metrics diff --git a/server/handlers/metrics.py b/server/handlers/metrics.py index a9c0b95..e60b0a7 100644 --- a/server/handlers/metrics.py +++ b/server/handlers/metrics.py @@ -4,8 +4,9 @@ from prometheus_client import CONTENT_TYPE_LATEST from core.prometheus_metrics_handler import get_metrics -# Handler for Prometheus metrics endpoint class PrometheusMetricsHandler(tornado.web.RequestHandler): + """Handler for Prometheus metrics endpoint""" + def get(self): self.write(get_metrics()) self.set_status(200) diff --git a/server/handlers/pagetemplate.py b/server/handlers/pagetemplate.py index c2419c4..5f872bd 100644 --- a/server/handlers/pagetemplate.py +++ b/server/handlers/pagetemplate.py @@ -8,8 +8,9 @@ from core.constants import SOFTWARE_VERSION from core.prometheus_metrics_handler import page_requests_counter -# Handler for all HTML pages generated from templates class PageTemplateHandler(tornado.web.RequestHandler): + """Handler for all HTML pages generated from templates""" + def initialize(self, template_name, web_server_metrics): 
self.template_name = template_name self.web_server_metrics = web_server_metrics @@ -24,4 +25,3 @@ class PageTemplateHandler(tornado.web.RequestHandler): # Load named template, and provide variables used in templates self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING, web_ui_options=WEB_UI_OPTIONS) - diff --git a/server/webserver.py b/server/webserver.py index e98956e..b4bfd1a 100644 --- a/server/webserver.py +++ b/server/webserver.py @@ -16,10 +16,12 @@ from server.handlers.metrics import PrometheusMetricsHandler from server.handlers.pagetemplate import PageTemplateHandler -# Provides the public-facing web server. class WebServer: - # Constructor + """Provides the public-facing web server.""" + def __init__(self, spots, alerts, status_data, port): + """Constructor""" + self.spots = spots self.alerts = alerts self.sse_spot_queues = [] @@ -35,24 +37,32 @@ class WebServer: "status": "Starting" } - # Start the web server def start(self): + """Start the web server""" + asyncio.run(self.start_inner()) - # Stop the web server def stop(self): + """Stop the web server""" + self.shutdown_event.set() - # Start method (async). Sets up the Tornado application. async def start_inner(self): + """Start method (async). 
Sets up the Tornado application.""" + app = tornado.web.Application([ # Routes for API calls (r"/api/v1/spots", APISpotsHandler, {"spots": self.spots, "web_server_metrics": self.web_server_metrics}), - (r"/api/v1/alerts", APIAlertsHandler, {"alerts": self.alerts, "web_server_metrics": self.web_server_metrics}), - (r"/api/v1/spots/stream", APISpotsStreamHandler, {"sse_spot_queues": self.sse_spot_queues, "web_server_metrics": self.web_server_metrics}), - (r"/api/v1/alerts/stream", APIAlertsStreamHandler, {"sse_alert_queues": self.sse_alert_queues, "web_server_metrics": self.web_server_metrics}), - (r"/api/v1/options", APIOptionsHandler, {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), - (r"/api/v1/status", APIStatusHandler, {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), + (r"/api/v1/alerts", APIAlertsHandler, + {"alerts": self.alerts, "web_server_metrics": self.web_server_metrics}), + (r"/api/v1/spots/stream", APISpotsStreamHandler, + {"sse_spot_queues": self.sse_spot_queues, "web_server_metrics": self.web_server_metrics}), + (r"/api/v1/alerts/stream", APIAlertsStreamHandler, + {"sse_alert_queues": self.sse_alert_queues, "web_server_metrics": self.web_server_metrics}), + (r"/api/v1/options", APIOptionsHandler, + {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), + (r"/api/v1/status", APIStatusHandler, + {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/call", APILookupCallHandler, {"web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/sigref", APILookupSIGRefHandler, {"web_server_metrics": self.web_server_metrics}), (r"/api/v1/lookup/grid", APILookupGridHandler, {"web_server_metrics": self.web_server_metrics}), @@ -61,11 +71,15 @@ class WebServer: (r"/", PageTemplateHandler, {"template_name": "spots", "web_server_metrics": self.web_server_metrics}), (r"/map", PageTemplateHandler, {"template_name": "map", 
"web_server_metrics": self.web_server_metrics}), (r"/bands", PageTemplateHandler, {"template_name": "bands", "web_server_metrics": self.web_server_metrics}), - (r"/alerts", PageTemplateHandler, {"template_name": "alerts", "web_server_metrics": self.web_server_metrics}), - (r"/add-spot", PageTemplateHandler, {"template_name": "add_spot", "web_server_metrics": self.web_server_metrics}), - (r"/status", PageTemplateHandler, {"template_name": "status", "web_server_metrics": self.web_server_metrics}), + (r"/alerts", PageTemplateHandler, + {"template_name": "alerts", "web_server_metrics": self.web_server_metrics}), + (r"/add-spot", PageTemplateHandler, + {"template_name": "add_spot", "web_server_metrics": self.web_server_metrics}), + (r"/status", PageTemplateHandler, + {"template_name": "status", "web_server_metrics": self.web_server_metrics}), (r"/about", PageTemplateHandler, {"template_name": "about", "web_server_metrics": self.web_server_metrics}), - (r"/apidocs", PageTemplateHandler, {"template_name": "apidocs", "web_server_metrics": self.web_server_metrics}), + (r"/apidocs", PageTemplateHandler, + {"template_name": "apidocs", "web_server_metrics": self.web_server_metrics}), # Route for Prometheus metrics (r"/metrics", PrometheusMetricsHandler), # Default route to serve from "webassets" @@ -76,9 +90,10 @@ class WebServer: app.listen(self.port) await self.shutdown_event.wait() - # Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are - # awaiting a server-sent message with new spots. def notify_new_spot(self, spot): + """Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are + awaiting a server-sent message with new spots.""" + for queue in self.sse_spot_queues: try: queue.put(spot) @@ -87,9 +102,10 @@ class WebServer: pass pass - # Internal method called when a new alert is added to the system. 
This is used to ping any SSE clients that are - # awaiting a server-sent message with new spots. def notify_new_alert(self, alert): + """Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are + awaiting a server-sent message with new alerts.""" + for queue in self.sse_alert_queues: try: queue.put(alert) @@ -98,13 +114,15 @@ class WebServer: pass pass - # Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it - # properly for some reason. def clean_up_sse_queues(self): + """Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it + properly for some reason.""" + for q in self.sse_spot_queues: try: if q.full(): - logging.warn("A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.") + logging.warn( + "A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.") self.sse_spot_queues.remove(q) empty_queue(q) except: @@ -113,7 +131,8 @@ class WebServer: for q in self.sse_alert_queues: try: if q.full(): - logging.warn("A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.") + logging.warn( + "A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.") self.sse_alert_queues.remove(q) empty_queue(q) except: diff --git a/spothole.py b/spothole.py index fc6ff33..01dfcce 100644 --- a/spothole.py +++ b/spothole.py @@ -25,8 +25,9 @@ cleanup_timer = None run = True -# Shutdown function def shutdown(sig, frame): + """Shutdown function""" + global run logging.info("Stopping program...") @@ -44,15 +45,17 @@ def shutdown(sig, frame): os._exit(0) -# Utility method to get a spot provider based on the class specified in its config entry. 
def get_spot_provider_from_config(config_providers_entry): + """Utility method to get a spot provider based on the class specified in its config entry.""" + module = importlib.import_module('spotproviders.' + config_providers_entry["class"].lower()) provider_class = getattr(module, config_providers_entry["class"]) return provider_class(config_providers_entry) -# Utility method to get an alert provider based on the class specified in its config entry. def get_alert_provider_from_config(config_providers_entry): + """Utility method to get an alert provider based on the class specified in its config entry.""" + module = importlib.import_module('alertproviders.' + config_providers_entry["class"].lower()) provider_class = getattr(module, config_providers_entry["class"]) return provider_class(config_providers_entry) diff --git a/spotproviders/aprsis.py b/spotproviders/aprsis.py index da27afb..df62bfa 100644 --- a/spotproviders/aprsis.py +++ b/spotproviders/aprsis.py @@ -10,8 +10,8 @@ from data.spot import Spot from spotproviders.spot_provider import SpotProvider -# Spot provider for the APRS-IS. 
class APRSIS(SpotProvider): + """Spot provider for the APRS-IS.""" def __init__(self, provider_config): super().__init__(provider_config) @@ -51,11 +51,12 @@ class APRSIS(SpotProvider): comment=data["comment"] if "comment" in data else None, dx_latitude=data["latitude"] if "latitude" in data else None, dx_longitude=data["longitude"] if "longitude" in data else None, - time=datetime.now(pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now" + time=datetime.now( + pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now" # Add to our list self.submit(spot) self.status = "OK" self.last_update_time = datetime.now(pytz.UTC) - logging.debug("Data received from APRS-IS.") \ No newline at end of file + logging.debug("Data received from APRS-IS.") diff --git a/spotproviders/dxcluster.py b/spotproviders/dxcluster.py index ceb84e0..8fd8ac5 100644 --- a/spotproviders/dxcluster.py +++ b/spotproviders/dxcluster.py @@ -12,9 +12,10 @@ from data.spot import Spot from spotproviders.spot_provider import SpotProvider -# Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config. -# See config-example.yml for examples. class DXCluster(SpotProvider): + """Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config. 
+ See config-example.yml for examples.""" + CALLSIGN_PATTERN = "([a-z|0-9|/]+)" FREQUENCY_PATTERN = "([0-9|.]+)" LINE_PATTERN_EXCLUDE_RBN = re.compile( @@ -24,13 +25,15 @@ class DXCluster(SpotProvider): "^DX de " + CALLSIGN_PATTERN + "-?#?:\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)", re.IGNORECASE) - # Constructor requires hostname and port def __init__(self, provider_config): + """Constructor requires hostname and port""" + super().__init__(provider_config) self.hostname = provider_config["host"] self.port = provider_config["port"] self.login_prompt = provider_config["login_prompt"] if "login_prompt" in provider_config else "login:" - self.login_callsign = provider_config["login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN + self.login_callsign = provider_config[ + "login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN self.allow_rbn_spots = provider_config["allow_rbn_spots"] if "allow_rbn_spots" in provider_config else False self.spot_line_pattern = self.LINE_PATTERN_ALLOW_RBN if self.allow_rbn_spots else self.LINE_PATTERN_EXCLUDE_RBN self.telnet = None @@ -96,4 +99,4 @@ class DXCluster(SpotProvider): logging.info("DX Cluster " + self.hostname + " shutting down...") self.status = "Shutting down" - self.status = "Disconnected" \ No newline at end of file + self.status = "Disconnected" diff --git a/spotproviders/gma.py b/spotproviders/gma.py index 4ba1502..d2bfe1b 100644 --- a/spotproviders/gma.py +++ b/spotproviders/gma.py @@ -10,8 +10,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for General Mountain Activity class GMA(HTTPSpotProvider): + """Spot provider for General Mountain Activity""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://www.cqgma.org/api/spots/25/" # GMA spots don't contain the details of the programme they are for, we need a separate lookup for that @@ -36,9 +37,11 @@ class 
GMA(HTTPSpotProvider): sig_refs=[SIGRef(id=source_spot["REF"], sig="", name=source_spot["NAME"])], time=datetime.strptime(source_spot["DATE"] + source_spot["TIME"], "%Y%m%d%H%M").replace( tzinfo=pytz.UTC).timestamp(), - dx_latitude=float(source_spot["LAT"]) if (source_spot["LAT"] and source_spot["LAT"] != "") else None, + dx_latitude=float(source_spot["LAT"]) if ( + source_spot["LAT"] and source_spot["LAT"] != "") else None, # Seen GMA spots with no (or empty) lat/lon - dx_longitude=float(source_spot["LON"]) if (source_spot["LON"] and source_spot["LON"] != "") else None) + dx_longitude=float(source_spot["LON"]) if ( + source_spot["LON"] and source_spot["LON"] != "") else None) # GMA doesn't give what programme (SIG) the reference is for until we separately look it up. if "REF" in source_spot: @@ -83,5 +86,6 @@ class GMA(HTTPSpotProvider): # that for us. new_spots.append(spot) except: - logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot["REF"] + ", ignoring this spot for now") + logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot[ + "REF"] + ", ignoring this spot for now") return new_spots diff --git a/spotproviders/hema.py b/spotproviders/hema.py index 76b97b3..31a4deb 100644 --- a/spotproviders/hema.py +++ b/spotproviders/hema.py @@ -10,8 +10,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for HuMPs Excluding Marilyns Award class HEMA(HTTPSpotProvider): + """Spot provider for HuMPs Excluding Marilyns Award""" + POLL_INTERVAL_SEC = 300 # HEMA wants us to check for a "spot seed" from the API and see if it's actually changed before querying the main # data API. 
So it's actually the SPOT_SEED_URL that we pass into the constructor and get the superclass to call on a @@ -54,11 +55,12 @@ class HEMA(HTTPSpotProvider): comment=spotter_comment_match.group(2), sig="HEMA", sig_refs=[SIGRef(id=spot_items[3].upper(), sig="HEMA", name=spot_items[4])], - time=datetime.strptime(spot_items[0], "%d/%m/%Y %H:%M").replace(tzinfo=pytz.UTC).timestamp(), + time=datetime.strptime(spot_items[0], "%d/%m/%Y %H:%M").replace( + tzinfo=pytz.UTC).timestamp(), dx_latitude=float(spot_items[7]), dx_longitude=float(spot_items[8])) # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do # that for us. new_spots.append(spot) - return new_spots \ No newline at end of file + return new_spots diff --git a/spotproviders/http_spot_provider.py b/spotproviders/http_spot_provider.py index 853db65..90a746a 100644 --- a/spotproviders/http_spot_provider.py +++ b/spotproviders/http_spot_provider.py @@ -9,9 +9,9 @@ from core.constants import HTTP_HEADERS from spotproviders.spot_provider import SpotProvider -# Generic spot provider class for providers that request data via HTTP(S). Just for convenience to avoid code -# duplication. Subclasses of this query the individual APIs for data. class HTTPSpotProvider(SpotProvider): + """Generic spot provider class for providers that request data via HTTP(S). Just for convenience to avoid code + duplication. Subclasses of this query the individual APIs for data.""" def __init__(self, provider_config, url, poll_interval): super().__init__(provider_config) @@ -55,8 +55,9 @@ class HTTPSpotProvider(SpotProvider): logging.exception("Exception in HTTP JSON Spot Provider (" + self.name + ")") self._stop_event.wait(timeout=1) - # Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass - # implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever - # the API actually provides. 
def http_response_to_spots(self, http_response): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass + implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever + the API actually provides.""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/spotproviders/llota.py b/spotproviders/llota.py index 2c362f9..2b13d66 100644 --- a/spotproviders/llota.py +++ b/spotproviders/llota.py @@ -5,8 +5,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Lagos y Lagunas On the Air class LLOTA(HTTPSpotProvider): + """Spot provider for Lagos y Lagunas On the Air""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://llota.app/api/public/spots" @@ -38,4 +39,4 @@ class LLOTA(HTTPSpotProvider): # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do # that for us. 
new_spots.append(spot) - return new_spots \ No newline at end of file + return new_spots diff --git a/spotproviders/parksnpeaks.py b/spotproviders/parksnpeaks.py index 4b7ee7a..ac01ce7 100644 --- a/spotproviders/parksnpeaks.py +++ b/spotproviders/parksnpeaks.py @@ -9,8 +9,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Parks n Peaks class ParksNPeaks(HTTPSpotProvider): + """Spot provider for Parks n Peaks""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://www.parksnpeaks.org/api/ALL" SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv" @@ -26,9 +27,10 @@ class ParksNPeaks(HTTPSpotProvider): spot = Spot(source=self.name, source_id=source_spot["actID"], dx_call=source_spot["actCallsign"].upper(), - de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None, # typo exists in API + de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None, + # typo exists in API freq=float(source_spot["actFreq"].replace(",", "")) * 1000000 if ( - source_spot["actFreq"] != "") else None, + source_spot["actFreq"] != "") else None, # Seen PNP spots with empty frequency, and with comma-separated thousands digits mode=source_spot["actMode"].upper(), comment=source_spot["actComments"], diff --git a/spotproviders/pota.py b/spotproviders/pota.py index 08f8ab1..be13679 100644 --- a/spotproviders/pota.py +++ b/spotproviders/pota.py @@ -7,8 +7,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Parks on the Air class POTA(HTTPSpotProvider): + """Spot provider for Parks on the Air""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://api.pota.app/spot/activator" diff --git a/spotproviders/rbn.py b/spotproviders/rbn.py index b8ac373..eb3359c 100644 --- a/spotproviders/rbn.py +++ b/spotproviders/rbn.py @@ -12,17 +12,19 @@ from data.spot import Spot from spotproviders.spot_provider import SpotProvider -# Spot 
provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8 -# (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor. class RBN(SpotProvider): + """Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8 + (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor.""" + CALLSIGN_PATTERN = "([a-z|0-9|/]+)" FREQUENCY_PATTERM = "([0-9|.]+)" LINE_PATTERN = re.compile( "^DX de " + CALLSIGN_PATTERN + "-.*:\\s+" + FREQUENCY_PATTERM + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)", re.IGNORECASE) - # Constructor requires port number. def __init__(self, provider_config): + """Constructor requires port number.""" + super().__init__(provider_config) self.port = provider_config["port"] self.telnet = None @@ -30,7 +32,6 @@ class RBN(SpotProvider): self.thread.daemon = True self.run = True - def start(self): self.thread.start() @@ -89,4 +90,4 @@ class RBN(SpotProvider): logging.info("RBN provider (port " + str(self.port) + ") shutting down...") self.status = "Shutting down" - self.status = "Disconnected" \ No newline at end of file + self.status = "Disconnected" diff --git a/spotproviders/sota.py b/spotproviders/sota.py index 56026d1..35dadd3 100644 --- a/spotproviders/sota.py +++ b/spotproviders/sota.py @@ -8,8 +8,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Summits on the Air class SOTA(HTTPSpotProvider): + """Spot provider for Summits on the Air""" + POLL_INTERVAL_SEC = 120 # SOTA wants us to check for an "epoch" from the API and see if it's actually changed before querying the main data # APIs. So it's actually the EPOCH_URL that we pass into the constructor and get the superclass to call on a timer. 
@@ -41,14 +42,17 @@ class SOTA(HTTPSpotProvider): dx_call=source_spot["activatorCallsign"].upper(), dx_name=source_spot["activatorName"], de_call=source_spot["callsign"].upper(), - freq=(float(source_spot["frequency"]) * 1000000) if (source_spot["frequency"] is not None) else None, # Seen SOTA spots with no frequency! + freq=(float(source_spot["frequency"]) * 1000000) if ( + source_spot["frequency"] is not None) else None, + # Seen SOTA spots with no frequency! mode=source_spot["mode"].upper(), comment=source_spot["comments"], sig="SOTA", - sig_refs=[SIGRef(id=source_spot["summitCode"], sig="SOTA", name=source_spot["summitName"], activation_score=source_spot["points"])], + sig_refs=[SIGRef(id=source_spot["summitCode"], sig="SOTA", name=source_spot["summitName"], + activation_score=source_spot["points"])], time=datetime.fromisoformat(source_spot["timeStamp"].replace("Z", "+00:00")).timestamp()) # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do # that for us. new_spots.append(spot) - return new_spots \ No newline at end of file + return new_spots diff --git a/spotproviders/spot_provider.py b/spotproviders/spot_provider.py index 779acf8..a2af714 100644 --- a/spotproviders/spot_provider.py +++ b/spotproviders/spot_provider.py @@ -5,11 +5,12 @@ import pytz from core.config import MAX_SPOT_AGE -# Generic spot provider class. Subclasses of this query the individual APIs for data. class SpotProvider: + """Generic spot provider class. Subclasses of this query the individual APIs for data.""" - # Constructor def __init__(self, provider_config): + """Constructor""" + self.name = provider_config["name"] self.enabled = provider_config["enabled"] self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC) @@ -18,20 +19,23 @@ class SpotProvider: self.spots = None self.web_server = None - # Set up the provider, e.g. giving it the spot list to work from def setup(self, spots, web_server): + """Set up the provider, e.g. 
giving it the spot list to work from""" + self.spots = spots self.web_server = web_server - # Start the provider. This should return immediately after spawning threads to access the remote resources def start(self): + """Start the provider. This should return immediately after spawning threads to access the remote resources""" + raise NotImplementedError("Subclasses must implement this method") - # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved - # by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have - # their infer_missing() method called to complete their data set. This is called by the API-querying - # subclasses on receiving spots. def submit_batch(self, spots): + """Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved + by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have + their infer_missing() method called to complete their data set. This is called by the API-querying + subclasses on receiving spots.""" + # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when spots are fired # off to SSE listeners. spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0)) @@ -42,10 +46,11 @@ class SpotProvider: self.add_spot(spot) self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC) - # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots - # passing the check will also have their infer_missing() method called to complete their data set. This is called by - # the data streaming subclasses, which can be relied upon not to re-provide old spots. def submit(self, spot): + """Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. 
Spots + passing the check will also have their infer_missing() method called to complete their data set. This is called by + the data streaming subclasses, which can be relied upon not to re-provide old spots.""" + # Fill in any blanks and add to the list spot.infer_missing() self.add_spot(spot) @@ -54,10 +59,11 @@ class SpotProvider: def add_spot(self, spot): if not spot.expired(): self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) - # Ping the web server in case we have any SSE connections that need to see this immediately + # Ping the web server in case we have any SSE connections that need to see this immediately if self.web_server: self.web_server.notify_new_spot(spot) - # Stop any threads and prepare for application shutdown def stop(self): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Stop any threads and prepare for application shutdown""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/spotproviders/sse_spot_provider.py b/spotproviders/sse_spot_provider.py index 0cb73be..2e94127 100644 --- a/spotproviders/sse_spot_provider.py +++ b/spotproviders/sse_spot_provider.py @@ -10,8 +10,8 @@ from core.constants import HTTP_HEADERS from spotproviders.spot_provider import SpotProvider -# Spot provider using Server-Sent Events. 
class SSESpotProvider(SpotProvider): + """Spot provider using Server-Sent Events.""" def __init__(self, provider_config, url): super().__init__(provider_config) @@ -62,7 +62,8 @@ class SSESpotProvider(SpotProvider): logging.debug("Received data from " + self.name + " spot API.") except Exception as e: - logging.exception("Exception processing message from SSE Spot Provider (" + self.name + ")") + logging.exception( + "Exception processing message from SSE Spot Provider (" + self.name + ")") except Exception as e: self.status = "Error" @@ -71,7 +72,8 @@ class SSESpotProvider(SpotProvider): self.status = "Disconnected" sleep(5) # Wait before trying to reconnect - # Convert an SSE message received from the API into a spot. The whole message data is provided here so the subclass - # implementations can handle the message as JSON, XML, text, whatever the API actually provides. def sse_message_to_spot(self, message_data): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Convert an SSE message received from the API into a spot. 
The whole message data is provided here so the subclass + implementations can handle the message as JSON, XML, text, whatever the API actually provides.""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/spotproviders/ukpacketnet.py b/spotproviders/ukpacketnet.py index 0ee734a..369dde0 100644 --- a/spotproviders/ukpacketnet.py +++ b/spotproviders/ukpacketnet.py @@ -7,8 +7,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for UK Packet Radio network API class UKPacketNet(HTTPSpotProvider): + """Spot provider for UK Packet Radio network API""" + POLL_INTERVAL_SEC = 600 SPOTS_URL = "https://nodes.ukpacketradio.network/api/nodedata" @@ -35,20 +36,26 @@ class UKPacketNet(HTTPSpotProvider): # First build a "full" comment combining some of the extra info comment = listed_port["comment"] if "comment" in listed_port else "" comment = (comment + " " + listed_port["mode"]) if "mode" in listed_port else comment - comment = (comment + " " + listed_port["modulation"]) if "modulation" in listed_port else comment - comment = (comment + " " + str(listed_port["baud"]) + " baud") if "baud" in listed_port and listed_port["baud"] > 0 else comment + comment = (comment + " " + listed_port[ + "modulation"]) if "modulation" in listed_port else comment + comment = (comment + " " + str( + listed_port["baud"]) + " baud") if "baud" in listed_port and listed_port[ + "baud"] > 0 else comment # Get frequency from the comment if it's not set properly in the data structure. This is # very hacky but a lot of node comments contain their frequency as the first or second # word of their comment, but not in the proper data structure field. 
- freq = listed_port["freq"] if "freq" in listed_port and listed_port["freq"] > 0 else None + freq = listed_port["freq"] if "freq" in listed_port and listed_port[ + "freq"] > 0 else None if not freq and comment: possible_freq = comment.split(" ")[0].upper().replace("MHZ", "") - if re.match(r"^[0-9.]+$", possible_freq) and possible_freq != "1200" and possible_freq != "9600": + if re.match(r"^[0-9.]+$", + possible_freq) and possible_freq != "1200" and possible_freq != "9600": freq = float(possible_freq) * 1000000 if not freq and len(comment.split(" ")) > 1: possible_freq = comment.split(" ")[1].upper().replace("MHZ", "") - if re.match(r"^[0-9.]+$", possible_freq) and possible_freq != "1200" and possible_freq != "9600": + if re.match(r"^[0-9.]+$", + possible_freq) and possible_freq != "1200" and possible_freq != "9600": freq = float(possible_freq) * 1000000 # Check for a found frequency likely having been in kHz, sorry to all GHz packet folks if freq and freq > 1000000000: @@ -61,8 +68,10 @@ class UKPacketNet(HTTPSpotProvider): freq=freq, mode="PKT", comment=comment, - time=datetime.strptime(heard["lastHeard"], "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC).timestamp(), - de_grid=node["location"]["locator"] if "locator" in node["location"] else None, + time=datetime.strptime(heard["lastHeard"], "%Y-%m-%d %H:%M:%S").replace( + tzinfo=pytz.UTC).timestamp(), + de_grid=node["location"]["locator"] if "locator" in node[ + "location"] else None, de_latitude=node["location"]["coords"]["lat"], de_longitude=node["location"]["coords"]["lon"]) @@ -77,7 +86,8 @@ class UKPacketNet(HTTPSpotProvider): # data, and we can use that to look these up. 
for spot in new_spots: if spot.dx_call in nodes: - spot.dx_grid = nodes[spot.dx_call]["location"]["locator"] if "locator" in nodes[spot.dx_call]["location"] else None + spot.dx_grid = nodes[spot.dx_call]["location"]["locator"] if "locator" in nodes[spot.dx_call][ + "location"] else None spot.dx_latitude = nodes[spot.dx_call]["location"]["coords"]["lat"] spot.dx_longitude = nodes[spot.dx_call]["location"]["coords"]["lon"] diff --git a/spotproviders/websocket_spot_provider.py b/spotproviders/websocket_spot_provider.py index 822e37a..377dcd3 100644 --- a/spotproviders/websocket_spot_provider.py +++ b/spotproviders/websocket_spot_provider.py @@ -10,8 +10,8 @@ from core.constants import HTTP_HEADERS from spotproviders.spot_provider import SpotProvider -# Spot provider using websockets. class WebsocketSpotProvider(SpotProvider): + """Spot provider using websockets.""" def __init__(self, provider_config, url): super().__init__(provider_config) @@ -60,7 +60,8 @@ class WebsocketSpotProvider(SpotProvider): logging.debug("Received data from " + self.name + " spot API.") except Exception as e: - logging.exception("Exception processing message from Websocket Spot Provider (" + self.name + ")") + logging.exception( + "Exception processing message from Websocket Spot Provider (" + self.name + ")") except Exception as e: self.status = "Error" @@ -69,7 +70,8 @@ class WebsocketSpotProvider(SpotProvider): self.status = "Disconnected" sleep(5) # Wait before trying to reconnect - # Convert a WS message received from the API into a spot. The exact message data (in bytes) is provided here so the - # subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides. def ws_message_to_spot(self, bytes): - raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file + """Convert a WS message received from the API into a spot. 
The exact message data (in bytes) is provided here so the + subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides.""" + + raise NotImplementedError("Subclasses must implement this method") diff --git a/spotproviders/wota.py b/spotproviders/wota.py index 1c1bb12..34850a6 100644 --- a/spotproviders/wota.py +++ b/spotproviders/wota.py @@ -10,8 +10,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Wainwrights on the Air class WOTA(HTTPSpotProvider): + """Spot provider for Wainwrights on the Air""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://www.wota.org.uk/spots_rss.php" LIST_URL = "https://www.wota.org.uk/mapping/data/summits.json" diff --git a/spotproviders/wwbota.py b/spotproviders/wwbota.py index d2d8d11..90a53b1 100644 --- a/spotproviders/wwbota.py +++ b/spotproviders/wwbota.py @@ -6,8 +6,9 @@ from data.spot import Spot from spotproviders.sse_spot_provider import SSESpotProvider -# Spot provider for Worldwide Bunkers on the Air class WWBOTA(SSESpotProvider): + """Spot provider for Worldwide Bunkers on the Air""" + SPOTS_URL = "https://api.wwbota.net/spots/" def __init__(self, provider_config): diff --git a/spotproviders/wwff.py b/spotproviders/wwff.py index f0bae48..26b3895 100644 --- a/spotproviders/wwff.py +++ b/spotproviders/wwff.py @@ -7,8 +7,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Worldwide Flora & Fauna class WWFF(HTTPSpotProvider): + """Spot provider for Worldwide Flora & Fauna""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://spots.wwff.co/static/spots.json" @@ -36,4 +37,4 @@ class WWFF(HTTPSpotProvider): # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do # that for us. 
new_spots.append(spot) - return new_spots \ No newline at end of file + return new_spots diff --git a/spotproviders/wwtota.py b/spotproviders/wwtota.py index e4486e9..45581bd 100644 --- a/spotproviders/wwtota.py +++ b/spotproviders/wwtota.py @@ -1,15 +1,15 @@ from datetime import datetime import json -import pytz from data.sig_ref import SIGRef from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for Towers on the Air class WWTOTA(HTTPSpotProvider): + """Spot provider for Towers on the Air""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://wwtota.com/api/cluster_live.php" @@ -33,9 +33,10 @@ class WWTOTA(HTTPSpotProvider): comment=source_spot["comment"], sig="WWTOTA", sig_refs=[SIGRef(id=source_spot["ref"], sig="WWTOTA")], - time=datetime.strptime(response_json["updated"][:10] + source_spot["time"], "%Y-%m-%d%H:%M").timestamp()) + time=datetime.strptime(response_json["updated"][:10] + source_spot["time"], + "%Y-%m-%d%H:%M").timestamp()) # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do # that for us. new_spots.append(spot) - return new_spots \ No newline at end of file + return new_spots diff --git a/spotproviders/xota.py b/spotproviders/xota.py index b6fbfc6..6ec0f0e 100644 --- a/spotproviders/xota.py +++ b/spotproviders/xota.py @@ -10,12 +10,13 @@ from data.spot import Spot from spotproviders.websocket_spot_provider import WebsocketSpotProvider -# Spot provider for servers based on the "xOTA" software at https://github.com/nischu/xOTA/ -# The provider typically doesn't give us a lat/lon or SIG explicitly, so our own config provides a SIG and a reference -# to a local CSV file with location information. 
This functionality is implemented for TOTA events, of which there are -# several - so a plain lookup of a "TOTA reference" doesn't make sense, it depends on which TOTA and hence which server -# supplied the data, which is why the CSV location lookup is here and not in sig_utils. class XOTA(WebsocketSpotProvider): + """Spot provider for servers based on the "xOTA" software at https://github.com/nischu/xOTA/ + The provider typically doesn't give us a lat/lon or SIG explicitly, so our own config provides a SIG and a reference + to a local CSV file with location information. This functionality is implemented for TOTA events, of which there are + several - so a plain lookup of a "TOTA reference" doesn't make sense, it depends on which TOTA and hence which server + supplied the data, which is why the CSV location lookup is here and not in sig_utils.""" + LOCATION_DATA = {} SIG = None @@ -47,7 +48,8 @@ class XOTA(WebsocketSpotProvider): freq=float(source_spot["freq"]) * 1000, mode=source_spot["mode"].upper(), sig=self.SIG, - sig_refs=[SIGRef(id=ref_id, sig=self.SIG, url=source_spot["reference"]["website"], latitude=lat, longitude=lon)], + sig_refs=[SIGRef(id=ref_id, sig=self.SIG, url=source_spot["reference"]["website"], latitude=lat, + longitude=lon)], time=datetime.now(pytz.UTC).timestamp(), dx_latitude=lat, dx_longitude=lon, diff --git a/spotproviders/zlota.py b/spotproviders/zlota.py index 62103d3..81c253c 100644 --- a/spotproviders/zlota.py +++ b/spotproviders/zlota.py @@ -7,8 +7,9 @@ from data.spot import Spot from spotproviders.http_spot_provider import HTTPSpotProvider -# Spot provider for ZLOTA class ZLOTA(HTTPSpotProvider): + """Spot provider for ZLOTA""" + POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://ontheair.nz/api/spots?zlota_only=true" LIST_URL = "https://ontheair.nz/assets/assets.json" @@ -35,7 +36,8 @@ class ZLOTA(HTTPSpotProvider): comment=source_spot["comments"], sig="ZLOTA", sig_refs=[SIGRef(id=source_spot["reference"], sig="ZLOTA", 
name=source_spot["name"])], - time=datetime.fromisoformat(source_spot["referenced_time"].replace("Z", "+00:00")).astimezone(pytz.UTC).timestamp()) + time=datetime.fromisoformat(source_spot["referenced_time"].replace("Z", "+00:00")).astimezone( + pytz.UTC).timestamp()) new_spots.append(spot) return new_spots diff --git a/templates/about.html b/templates/about.html index 7f8cd6a..80ed857 100644 --- a/templates/about.html +++ b/templates/about.html @@ -66,7 +66,7 @@
This software is dedicated to the memory of Tom G1PJB, SK, a friend and colleague who sadly passed away around the time I started writing it in Autumn 2025. I was looking forward to showing it to you when it was done.
- + {% end %} \ No newline at end of file diff --git a/templates/add_spot.html b/templates/add_spot.html index a43e723..b2d3184 100644 --- a/templates/add_spot.html +++ b/templates/add_spot.html @@ -69,8 +69,8 @@ - - + + {% end %} \ No newline at end of file diff --git a/templates/alerts.html b/templates/alerts.html index 27b132d..2ac62cb 100644 --- a/templates/alerts.html +++ b/templates/alerts.html @@ -56,8 +56,8 @@ - - + + {% end %} \ No newline at end of file diff --git a/templates/bands.html b/templates/bands.html index 8a8789b..d569572 100644 --- a/templates/bands.html +++ b/templates/bands.html @@ -62,9 +62,9 @@ - - - + + + {% end %} \ No newline at end of file diff --git a/templates/base.html b/templates/base.html index 0abd067..460358b 100644 --- a/templates/base.html +++ b/templates/base.html @@ -46,10 +46,10 @@ crossorigin="anonymous"> - - - - + + + + diff --git a/templates/map.html b/templates/map.html index 8865df9..af19949 100644 --- a/templates/map.html +++ b/templates/map.html @@ -70,9 +70,9 @@ - - - + + + {% end %} \ No newline at end of file diff --git a/templates/spots.html b/templates/spots.html index 7895aaf..6fcc90f 100644 --- a/templates/spots.html +++ b/templates/spots.html @@ -87,9 +87,9 @@ - - - + + + {% end %} \ No newline at end of file diff --git a/templates/status.html b/templates/status.html index 8863f4e..0fec460 100644 --- a/templates/status.html +++ b/templates/status.html @@ -3,8 +3,8 @@ - - + + {% end %} \ No newline at end of file diff --git a/webassets/img/flags/generate.py b/webassets/img/flags/generate.py index 7e90325..29d16dc 100644 --- a/webassets/img/flags/generate.py +++ b/webassets/img/flags/generate.py @@ -17,9 +17,10 @@ for dxcc in data["dxcc"]: flag = dxcc["flag"] image = Image.new("RGBA", (140, 110), (255, 0, 0, 0)) draw = ImageDraw.Draw(image) - draw.text((0, -10), flag, font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109), embedded_color=True) + draw.text((0, -10), flag, 
font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109), + embedded_color=True) outfile = str(id) + ".png" image.save(outfile, "PNG") image = Image.new("RGBA", (140, 110), (255, 0, 0, 0)) -image.save("999.png", "PNG") \ No newline at end of file +image.save("999.png", "PNG")