diff --git a/.gitignore b/.gitignore index 6fe7c65..4919d7b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ /.venv __pycache__ *.pyc +/.alerts_cache /.spots_cache /.qrz_callsign_lookup_cache /sota_summit_data_cache.sqlite diff --git a/README.md b/README.md index d4577e8..e7c3180 100644 --- a/README.md +++ b/README.md @@ -145,7 +145,8 @@ To navigate your way around the source code, this list may help. * `/core` - Core classes and scripts * `/data` - Data storage classes -* `/providers` - Classes providing data by accessing the APIs of other services +* `/spotproviders` - Classes providing spots by accessing the APIs of other services +* `/alertproviders` - Classes providing alerts by accessing the APIs of other services * `/server` - Classes for running Spothole's own web server *Templates* diff --git a/alertproviders/alert_provider.py b/alertproviders/alert_provider.py new file mode 100644 index 0000000..59e6e4d --- /dev/null +++ b/alertproviders/alert_provider.py @@ -0,0 +1,43 @@ +from datetime import datetime + +import pytz + +from core.config import SERVER_OWNER_CALLSIGN, MAX_ALERT_AGE +from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION + + +# Generic alert provider class. Subclasses of this query the individual APIs for alerts. +class AlertProvider: + + # HTTP headers used for spot providers that use HTTP + HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" } + + # Constructor + def __init__(self, provider_config): + self.name = provider_config["name"] + self.enabled = provider_config["enabled"] + self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC) + self.status = "Not Started" if self.enabled else "Disabled" + self.alerts = None + + # Set up the provider, e.g. giving it the alert list to work from + def setup(self, alerts): + self.alerts = alerts + + # Start the provider. 
This should return immediately after spawning threads to access the remote resources + def start(self): + raise NotImplementedError("Subclasses must implement this method") + + # Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots, + # because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching + # to deal with duplicates. + def submit_batch(self, alerts): + for alert in alerts: + # Fill in any blanks + alert.infer_missing() + # Add to the list + self.alerts.add(alert.id, alert, expire=MAX_ALERT_AGE) + + # Stop any threads and prepare for application shutdown + def stop(self): + raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file diff --git a/alertproviders/http_alert_provider.py b/alertproviders/http_alert_provider.py new file mode 100644 index 0000000..f1ee6ae --- /dev/null +++ b/alertproviders/http_alert_provider.py @@ -0,0 +1,61 @@ +import logging +from datetime import datetime +from threading import Timer, Thread +from time import sleep + +import pytz +import requests + +from alertproviders.alert_provider import AlertProvider + + +# Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code +# duplication. Subclasses of this query the individual APIs for data. +class HTTPAlertProvider(AlertProvider): + + def __init__(self, provider_config, url, poll_interval): + super().__init__(provider_config) + self.url = url + self.poll_interval = poll_interval + self.poll_timer = None + + def start(self): + # Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and + # the application can continue starting. The thread itself will then die, and the timer will kick in on its own + # thread. 
+ logging.info("Set up query of " + self.name + " alert API every " + str(self.poll_interval) + " seconds.") + thread = Thread(target=self.poll) + thread.daemon = True + thread.start() + + def stop(self): + self.poll_timer.cancel() + + def poll(self): + try: + # Request data from API + logging.debug("Polling " + self.name + " alert API...") + http_response = requests.get(self.url, headers=self.HTTP_HEADERS) + # Pass off to the subclass for processing + new_alerts = self.http_response_to_alerts(http_response) + # Submit the new alerts for processing. There might not be any alerts for the less popular programs. + if new_alerts: + self.submit_batch(new_alerts) + + self.status = "OK" + self.last_update_time = datetime.now(pytz.UTC) + logging.debug("Received data from " + self.name + " alert API.") + + except Exception as e: + self.status = "Error" + logging.exception("Exception in HTTP JSON Alert Provider (" + self.name + ")") + sleep(1) + + self.poll_timer = Timer(self.poll_interval, self.poll) + self.poll_timer.start() + + # Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass + # implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever + # the API actually provides. 
+ def http_response_to_alerts(self, http_response): + raise NotImplementedError("Subclasses must implement this method") \ No newline at end of file diff --git a/alertproviders/pota.py b/alertproviders/pota.py new file mode 100644 index 0000000..fb47c95 --- /dev/null +++ b/alertproviders/pota.py @@ -0,0 +1,39 @@ +from datetime import datetime + +import pytz + +from alertproviders.http_alert_provider import HTTPAlertProvider +from data.alert import Alert + + +# Alert provider for Parks on the Air +class POTA(HTTPAlertProvider): + POLL_INTERVAL_SEC = 3600 + ALERTS_URL = "https://api.pota.app/activation" + + def __init__(self, provider_config): + super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC) + + def http_response_to_alerts(self, http_response): + new_alerts = [] + # Iterate through source data + for source_alert in http_response.json(): + # Convert to our alert format + alert = Alert(source=self.name, + source_id=source_alert["scheduledActivitiesId"], + dx_call=source_alert["activator"].upper(), + freqs_modes=source_alert["frequencies"], + comment=source_alert["comments"], + sig="POTA", + sig_refs=[source_alert["reference"]], + sig_refs_names=[source_alert["name"]], + icon="tree", + start_time=datetime.strptime(source_alert["startDate"] + source_alert["startTime"], + "%Y-%m-%d%H:%M").replace(tzinfo=pytz.UTC).timestamp(), + end_time=datetime.strptime(source_alert["endDate"] + source_alert["endTime"], + "%Y-%m-%d%H:%M").replace(tzinfo=pytz.UTC).timestamp()) + + # Add to our list. Don't worry about de-duping, removing old alerts etc. at this point; other code will do + # that for us. + new_alerts.append(alert) + return new_alerts diff --git a/config-example.yml b/config-example.yml index cf810fb..180d3d2 100644 --- a/config-example.yml +++ b/config-example.yml @@ -6,13 +6,13 @@ # this as "N0CALL" and it shouldn't do any harm, as we're not sending anything to the various networks, only receiving. 
server-owner-callsign: "N0CALL" -# Data providers to use. This is an example set, tailor it to your liking by commenting and uncommenting. +# Spot providers to use. This is an example set, tailor it to your liking by commenting and uncommenting. # RBN and APRS-IS are supported but have such a high data rate, you probably don't want them enabled. # Each provider needs a class, a name, and an enabled/disabled state. Some require more config such as hostnames/IP # addresses and ports. You can duplicate them if you like, e.g. to support several DX clusters. RBN uses two ports, 7000 # for CW/RTTY and 7001 for FT8, so if you want both, you need two entries, as shown below. # Feel free to write your own provider classes! There are details in the README. -providers: +spot-providers: - class: "POTA" name: "POTA" @@ -62,11 +62,20 @@ providers: enabled: false port: 7001 +# Alert providers to use. Same setup as the spot providers list above. +alert-providers: + - + class: "POTA" + name: "POTA" + enabled: true + # Port to open the local web server on web-server-port: 8080 -# Maximum spot age to keep in the system before deleting it +# Maximum time to keep spots and alerts in the system before deleting them. By default, one hour for spots and one week +# for alerts. max-spot-age-sec: 3600 +max-alert-age-sec: 604800 # Login for QRZ.com to look up information. Optional. 
qrz-username: "N0CALL" diff --git a/core/config.py b/core/config.py index 874702e..6cb0ad6 100644 --- a/core/config.py +++ b/core/config.py @@ -13,6 +13,7 @@ config = yaml.safe_load(open("config.yml")) logging.info("Loaded config.") MAX_SPOT_AGE = config["max-spot-age-sec"] +MAX_ALERT_AGE = config["max-alert-age-sec"] SERVER_OWNER_CALLSIGN = config["server-owner-callsign"] WEB_SERVER_PORT = config["web-server-port"] ALLOW_SPOTTING = config["allow-spotting"] \ No newline at end of file diff --git a/core/status_reporter.py b/core/status_reporter.py new file mode 100644 index 0000000..621cfec --- /dev/null +++ b/core/status_reporter.py @@ -0,0 +1,59 @@ +import os +from datetime import datetime +from threading import Timer + +import psutil +import pytz + +from core.config import SERVER_OWNER_CALLSIGN +from core.constants import SOFTWARE_VERSION + + +# Provides a timed update of the application's status data. +class StatusReporter: + + # Constructor + def __init__(self, status_data, run_interval, web_server, cleanup_timer, spots, spot_providers, alerts, + alert_providers): + self.status_data = status_data + self.run_interval = run_interval + self.web_server = web_server + self.cleanup_timer = cleanup_timer + self.spots = spots + self.spot_providers = spot_providers + self.alerts = alerts + self.alert_providers = alert_providers + self.run_timer = None + self.startup_time = datetime.now(pytz.UTC) + + self.status_data["software-version"] = SOFTWARE_VERSION + self.status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN + + # Start the cleanup timer + def start(self): + self.run() + + # Stop any threads and prepare for application shutdown + def stop(self): + self.run_timer.cancel() + + # Write status information and reschedule next timer + def run(self): + self.status_data["uptime"] = str(datetime.now(pytz.UTC) - self.startup_time).split(".")[0] + self.status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3) + 
self.status_data["num_spots"] = len(self.spots) + self.status_data["num_alerts"] = len(self.alerts) + self.status_data["spot_providers"] = list( + map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, "last_updated": p.last_update_time, + "last_spot": p.last_spot_time}, self.spot_providers)) + self.status_data["alert_providers"] = list( + map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, + "last_updated": p.last_update_time}, self.alert_providers)) + self.status_data["cleanup"] = {"status": self.cleanup_timer.status, + "last_ran": self.cleanup_timer.last_cleanup_time} + self.status_data["webserver"] = {"status": self.web_server.status, + "last_api_access": self.web_server.last_api_access_time, + "last_page_access": self.web_server.last_page_access_time} + + self.run_timer = Timer(self.run_interval, self.run) + self.run_timer.start() diff --git a/data/alert.py b/data/alert.py new file mode 100644 index 0000000..d628450 --- /dev/null +++ b/data/alert.py @@ -0,0 +1,111 @@ +import json +from dataclasses import dataclass +from datetime import datetime + +import pytz + +from core.constants import DXCC_FLAGS +from core.utils import infer_continent_from_callsign, \ + infer_country_from_callsign, infer_cq_zone_from_callsign, infer_itu_zone_from_callsign, infer_dxcc_id_from_callsign, \ + infer_name_from_callsign + + +# Data class that defines an alert. 
+@dataclass
+class Alert:
+    # Unique identifier for the alert
+    id: int = None
+    # Callsign of the operator that has been alerted
+    dx_call: str = None
+    # Name of the operator that has been alerted
+    dx_name: str = None
+    # Country of the DX operator
+    dx_country: str = None
+    # Country flag of the DX operator
+    dx_flag: str = None
+    # Continent of the DX operator
+    dx_continent: str = None
+    # DXCC ID of the DX operator
+    dx_dxcc_id: int = None
+    # CQ zone of the DX operator
+    dx_cq_zone: int = None
+    # ITU zone of the DX operator
+    dx_itu_zone: int = None
+    # Intended frequencies & modes of operation. Essentially just a different kind of comment field.
+    freqs_modes: str = None
+    # Start time of the activation, UTC seconds since UNIX epoch
+    start_time: float = None
+    # Start time of the activation of the alert, ISO 8601
+    start_time_iso: str = None
+    # End time of the activation, UTC seconds since UNIX epoch. Optional
+    end_time: float = None
+    # End time of the activation of the alert, ISO 8601
+    end_time_iso: str = None
+    # Time that this software received the alert, UTC seconds since UNIX epoch. This is used with the "received_since"
+    # call to our API to receive all data that is new to us, even if by a quirk of the API it might be older than the
+    # last time the client polled the API.
+    received_time: float = None
+    # Time that this software received the alert, ISO 8601
+    received_time_iso: str = None
+    # Comment made by the alerter, if any
+    comment: str = None
+    # Special Interest Group (SIG), e.g. outdoor activity programme such as POTA
+    sig: str = None
+    # SIG references. We allow multiple here for e.g. n-fer activations, unlike ADIF SIG_INFO
+    sig_refs: list = None
+    # SIG reference names
+    sig_refs_names: list = None
+    # Activation score. SOTA only
+    activation_score: int = None
+    # Icon, from the Font Awesome set. This is fairly opinionated but is here to help the Spothole web UI and Field Spotter. Does not include the "fa-" prefix.
+ icon: str = "question" + # Where we got the alert from, e.g. "POTA", "SOTA"... + source: str = None + # The ID the source gave it, if any. + source_id: str = None + + # Infer missing parameters where possible + def infer_missing(self): + # If we somehow don't have a start time, set it to zero so it sorts off the bottom of any list but + # clients can still reliably parse it as a number. + if not self.start_time: + self.start_time = 0 + + # If we don't have a received time, this has just been received so set that to "now" + if not self.received_time: + self.received_time = datetime.now(pytz.UTC).timestamp() + + # Fill in ISO versions of times, in case the client prefers that + if self.start_time and not self.start_time_iso: + self.start_time_iso = datetime.fromtimestamp(self.start_time, pytz.UTC).isoformat() + if self.end_time and not self.end_time_iso: + self.end_time_iso = datetime.fromtimestamp(self.end_time, pytz.UTC).isoformat() + if self.received_time and not self.received_time_iso: + self.received_time_iso = datetime.fromtimestamp(self.received_time, pytz.UTC).isoformat() + + # DX country, continent, zones etc. from callsign + if self.dx_call and not self.dx_country: + self.dx_country = infer_country_from_callsign(self.dx_call) + if self.dx_call and not self.dx_continent: + self.dx_continent = infer_continent_from_callsign(self.dx_call) + if self.dx_call and not self.dx_cq_zone: + self.dx_cq_zone = infer_cq_zone_from_callsign(self.dx_call) + if self.dx_call and not self.dx_itu_zone: + self.dx_itu_zone = infer_itu_zone_from_callsign(self.dx_call) + if self.dx_call and not self.dx_dxcc_id: + self.dx_dxcc_id = infer_dxcc_id_from_callsign(self.dx_call) + if self.dx_dxcc_id and not self.dx_flag: + self.dx_flag = DXCC_FLAGS[self.dx_dxcc_id] + + # DX operator details lookup, using QRZ.com. This should be the last resort compared to taking the data from + # the actual alertting service, e.g. 
we don't want to accidentally use a user's QRZ.com home lat/lon instead of + # the one from the park reference they're at. + if self.dx_call and not self.dx_name: + self.dx_name = infer_name_from_callsign(self.dx_call) + + # Always create an ID based on a hashcode + self.id = hash(str(self)) + + # JSON serialise + def to_json(self): + return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) \ No newline at end of file diff --git a/data/spot.py b/data/spot.py index 9a6a3f0..11294bb 100644 --- a/data/spot.py +++ b/data/spot.py @@ -1,5 +1,4 @@ import json -import uuid from dataclasses import dataclass from datetime import datetime @@ -16,8 +15,8 @@ from core.utils import infer_mode_type_from_mode, infer_band_from_freq, infer_co # Data class that defines a spot. @dataclass class Spot: - # Globally unique identifier for the spot - guid: str = None + # Unique identifier for the spot + id: int = None # Callsign of the operator that has been spotted dx_call: str = None # Callsign of the operator that has spotted them @@ -97,9 +96,6 @@ class Spot: # Infer missing parameters where possible def infer_missing(self): - # Always create a GUID - self.guid = str(uuid.uuid4()) - # If we somehow don't have a spot time, set it to zero so it sorts off the bottom of any list but # clients can still reliably parse it as a number. if not self.time: @@ -211,6 +207,9 @@ class Spot: # is likely at home. 
self.location_good = self.location_source == "SPOT" or (self.location_source == "QRZ" and not "/" in self.dx_call) + # Always create an ID based on a hashcode + self.id = hash(str(self)) + # JSON serialise def to_json(self): return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True) \ No newline at end of file diff --git a/server/webserver.py b/server/webserver.py index f73bc0e..e29df7b 100644 --- a/server/webserver.py +++ b/server/webserver.py @@ -2,7 +2,6 @@ import json import logging from datetime import datetime, timedelta from threading import Thread -from types import SimpleNamespace import bottle import pytz @@ -30,11 +29,13 @@ class WebServer: # Routes for API calls bottle.get("/api/spots")(lambda: self.serve_api(self.get_spot_list_with_filters())) + bottle.get("/api/alerts")(lambda: self.serve_api(self.get_alert_list_with_filters())) bottle.get("/api/options")(lambda: self.serve_api(self.get_options())) bottle.get("/api/status")(lambda: self.serve_api(self.status_data)) bottle.post("/api/spot")(lambda: self.accept_spot()) # Routes for templated pages bottle.get("/")(lambda: self.serve_template('webpage_spots')) + bottle.get("/alerts")(lambda: self.serve_template('webpage_alerts')) bottle.get("/about")(lambda: self.serve_template('webpage_about')) bottle.get("/apidocs")(lambda: self.serve_template('webpage_apidocs')) # Default route to serve from "webassets" @@ -90,7 +91,7 @@ class WebServer: spot.source = "API" spot.icon = "desktop" spot.infer_missing() - self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE) + self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) response.content_type = 'application/json' response.set_header('Cache-Control', 'no-store') @@ -114,7 +115,7 @@ class WebServer: # Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in # the main "spots" GET call. def get_spot_list_with_filters(self): - # Get the query (and the right one, with Bottle magic. 
This is a MultiDict object + # Get the query (and the right one, with Bottle magic. This is a MultiDict object) query = bottle.request.query # Create a shallow copy of the spot list, ordered by spot time. We'll then filter it accordingly. @@ -124,9 +125,9 @@ class WebServer: # value or a comma-separated list. # We can provide a "limit" number as well. Spots are always returned newest-first; "limit" limits to only the # most recent X spots. - spot_guids = list(self.spots.iterkeys()) + spot_ids = list(self.spots.iterkeys()) spots = [] - for k in spot_guids: + for k in spot_ids: spots.append(self.spots.get(k)) spots = sorted(spots, key=lambda spot: spot.time, reverse=True) for k in query.keys(): @@ -167,6 +168,43 @@ class WebServer: spots = spots[:int(query.get("limit"))] return spots + + # Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in + # the main "alerts" GET call. + def get_alert_list_with_filters(self): + # Get the query (and the right one, with Bottle magic. This is a MultiDict object) + query = bottle.request.query + + # Create a shallow copy of the alert list, ordered by alert time. We'll then filter it accordingly. + # We can filter by received time with "received_since", which take a UNIX timestamp in seconds UTC. + # We can also filter by source, sig, and dx_continent. Each of these accepts a single + # value or a comma-separated list. + # We can provide a "limit" number as well. Alerts are always returned newest-first; "limit" limits to only the + # most recent X alerts. 
+        # Iterate the ALERT cache, not the spot cache: this method serves /api/alerts. (Requires self.alerts to be
+        # passed into WebServer.__init__ alongside self.spots.)
+        alert_ids = list(self.alerts.iterkeys())
+        alerts = []
+        for k in alert_ids:
+            alerts.append(self.alerts.get(k))
+        alerts = sorted(alerts, key=lambda alert: alert.received_time, reverse=True)
+        for k in query.keys():
+            match k:
+                case "received_since":
+                    since = int(query.get(k))
+                    alerts = [a for a in alerts if a.received_time > since]
+                case "source":
+                    sources = query.get(k).split(",")
+                    alerts = [a for a in alerts if a.source in sources]
+                case "sig":
+                    sigs = query.get(k).split(",")
+                    alerts = [a for a in alerts if a.sig in sigs]
+                case "dx_continent":
+                    dxconts = query.get(k).split(",")
+                    alerts = [a for a in alerts if a.dx_continent in dxconts]
+        # If we have a "limit" parameter, we apply that last, regardless of where it appeared in the list of keys.
+        if "limit" in query.keys():
+            alerts = alerts[:int(query.get("limit"))]
+        return alerts
+
     # Return all the "options" for various things that the server is aware of. This can be fetched with an API call.
     # The idea is that this will include most of the things that can be provided as queries to the main spots call,
     # and thus a client can use this data to configure its filter controls.
@@ -175,7 +213,8 @@
             "modes": ALL_MODES,
             "mode_types": MODE_TYPES,
             "sigs": SIGS,
-            # Sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
-            "sources": list(map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["providers"]))),
+            # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
+ "spot_sources": list(map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), + "alert_sources": list(map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), "continents": CONTINENTS, "max_spot_age": MAX_SPOT_AGE} diff --git a/spothole.py b/spothole.py index c4584c6..5a85444 100644 --- a/spothole.py +++ b/spothole.py @@ -1,44 +1,50 @@ # Main script +import importlib import logging -import os import signal import sys -from datetime import datetime -from time import sleep -import importlib -import psutil -import pytz from diskcache import Cache from core.cleanup import CleanupTimer -from core.config import config, MAX_SPOT_AGE, WEB_SERVER_PORT, SERVER_OWNER_CALLSIGN -from core.constants import SOFTWARE_VERSION +from core.config import config, WEB_SERVER_PORT +from core.status_reporter import StatusReporter from core.utils import QRZ_CALLSIGN_DATA_CACHE from server.webserver import WebServer # Globals spots = Cache('.spots_cache') +alerts = Cache('.alerts_cache') status_data = {} -providers = [] +spot_providers = [] +alert_providers = [] cleanup_timer = None -run = True + # Shutdown function def shutdown(sig, frame): logging.info("Stopping program, this may take a few seconds...") - global run - run = False - for p in providers: + for p in spot_providers: + if p.enabled: + p.stop() + for p in alert_providers: if p.enabled: p.stop() cleanup_timer.stop() QRZ_CALLSIGN_DATA_CACHE.close() spots.close() -# Utility method to get a data provider based on the class specified in its config entry. -def get_provider_from_config(config_providers_entry): - module = importlib.import_module('providers.' + config_providers_entry["class"].lower()) + +# Utility method to get a spot provider based on the class specified in its config entry. +def get_spot_provider_from_config(config_providers_entry): + module = importlib.import_module('spotproviders.' 
+ config_providers_entry["class"].lower()) + provider_class = getattr(module, config_providers_entry["class"]) + return provider_class(config_providers_entry) + + +# Utility method to get an alert provider based on the class specified in its config entry. +def get_alert_provider_from_config(config_providers_entry): + module = importlib.import_module('alertproviders.' + config_providers_entry["class"].lower()) provider_class = getattr(module, config_providers_entry["class"]) return provider_class(config_providers_entry) @@ -55,19 +61,23 @@ if __name__ == '__main__': root.addHandler(handler) logging.info("Starting...") - startup_time = datetime.now(pytz.UTC) - status_data["software-version"] = SOFTWARE_VERSION - status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN # Shut down gracefully on SIGINT signal.signal(signal.SIGINT, shutdown) - for entry in config["providers"]: - providers.append(get_provider_from_config(entry)) - # Set up data providers - for p in providers: p.setup(spots=spots) - # Start data providers - for p in providers: + # Fetch, set up and start spot providers + for entry in config["spot-providers"]: + spot_providers.append(get_spot_provider_from_config(entry)) + for p in spot_providers: + p.setup(spots=spots) + if p.enabled: + p.start() + + # Fetch, set up and start alert providers + for entry in config["alert-providers"]: + alert_providers.append(get_alert_provider_from_config(entry)) + for p in alert_providers: + p.setup(alerts=alerts) if p.enabled: p.start() @@ -79,14 +89,10 @@ if __name__ == '__main__': web_server = WebServer(spots=spots, status_data=status_data, port=WEB_SERVER_PORT) web_server.start() - logging.info("Startup complete.") + # Set up status reporter + status_reporter = StatusReporter(status_data=status_data, spots=spots, alerts=alerts, web_server=web_server, + cleanup_timer=cleanup_timer, spot_providers=spot_providers, + alert_providers=alert_providers, run_interval=5) + status_reporter.start() - # While running, update 
the status information at a regular interval - while run: - sleep(5) - status_data["uptime"] = str(datetime.now(pytz.UTC) - startup_time).split(".")[0] - status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3) - status_data["num_spots"] = len(spots) - status_data["providers"] = list(map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status, "last_updated": p.last_update_time, "last_spot": p.last_spot_time}, providers)) - status_data["cleanup"] = {"status": cleanup_timer.status, "last_ran": cleanup_timer.last_cleanup_time} - status_data["webserver"] = {"status": web_server.status, "last_api_access": web_server.last_api_access_time, "last_page_access": web_server.last_page_access_time} + logging.info("Startup complete.") diff --git a/providers/aprsis.py b/spotproviders/aprsis.py similarity index 91% rename from providers/aprsis.py rename to spotproviders/aprsis.py index 43628ec..dd75b56 100644 --- a/providers/aprsis.py +++ b/spotproviders/aprsis.py @@ -7,11 +7,11 @@ import pytz from core.config import SERVER_OWNER_CALLSIGN from data.spot import Spot -from providers.provider import Provider +from spotproviders.spot_provider import SpotProvider -# Provider for the APRS-IS. -class APRSIS(Provider): +# Spot provider for the APRS-IS. 
+class APRSIS(SpotProvider): def __init__(self, provider_config): super().__init__(provider_config) @@ -52,7 +52,6 @@ class APRSIS(Provider): # Add to our list self.submit(spot) - print(spot) self.status = "OK" self.last_update_time = datetime.now(timezone.utc) diff --git a/providers/dxcluster.py b/spotproviders/dxcluster.py similarity index 94% rename from providers/dxcluster.py rename to spotproviders/dxcluster.py index ad71932..54fd092 100644 --- a/providers/dxcluster.py +++ b/spotproviders/dxcluster.py @@ -9,11 +9,11 @@ import telnetlib3 from data.spot import Spot from core.config import SERVER_OWNER_CALLSIGN -from providers.provider import Provider +from spotproviders.spot_provider import SpotProvider -# Provider for a DX Cluster. Hostname and port provided as parameters. -class DXCluster(Provider): +# Spot provider for a DX Cluster. Hostname and port provided as parameters. +class DXCluster(SpotProvider): CALLSIGN_PATTERN = "([a-z|0-9|/]+)" FREQUENCY_PATTERM = "([0-9|.]+)" LINE_PATTERN = re.compile( diff --git a/providers/gma.py b/spotproviders/gma.py similarity index 95% rename from providers/gma.py rename to spotproviders/gma.py index 13d9fff..b8b685a 100644 --- a/providers/gma.py +++ b/spotproviders/gma.py @@ -5,11 +5,11 @@ import pytz from requests_cache import CachedSession from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for General Mountain Activity -class GMA(HTTPProvider): +# Spot provider for General Mountain Activity +class GMA(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://www.cqgma.org/api/spots/25/" # GMA spots don't contain the details of the programme they are for, we need a separate lookup for that @@ -51,7 +51,7 @@ class GMA(HTTPProvider): # spots come through with reftype=POTA or reftype=WWFF. 
SOTA is harder to figure out because both SOTA # and GMA summits come through with reftype=Summit, so we must check for the presence of a "sota" entry # to determine if it's a SOTA summit. - if ref_info["reftype"] not in ["POTA", "WWFF"] and (ref_info["reftype"] is not "Summit" or ref_info["sota"] is ""): + if ref_info["reftype"] not in ["POTA", "WWFF"] and (ref_info["reftype"] != "Summit" or ref_info["sota"] == ""): match ref_info["reftype"]: case "Summit": spot.sig = "GMA" diff --git a/providers/hema.py b/spotproviders/hema.py similarity index 95% rename from providers/hema.py rename to spotproviders/hema.py index 414a401..6718735 100644 --- a/providers/hema.py +++ b/spotproviders/hema.py @@ -6,11 +6,11 @@ import requests from requests_cache import CachedSession from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for HuMPs Excluding Marilyns Award -class HEMA(HTTPProvider): +# Spot provider for HuMPs Excluding Marilyns Award +class HEMA(HTTPSpotProvider): POLL_INTERVAL_SEC = 300 # HEMA wants us to check for a "spot seed" from the API and see if it's actually changed before querying the main # data API. So it's actually the SPOT_SEED_URL that we pass into the constructor and get the superclass to call on a diff --git a/providers/http_provider.py b/spotproviders/http_spot_provider.py similarity index 82% rename from providers/http_provider.py rename to spotproviders/http_spot_provider.py index d31c3f2..b511004 100644 --- a/providers/http_provider.py +++ b/spotproviders/http_spot_provider.py @@ -6,12 +6,12 @@ from time import sleep import pytz import requests -from providers.provider import Provider +from spotproviders.spot_provider import SpotProvider -# Generic data provider class for providers that request data via HTTP(S). Just for convenience to avoid code +# Generic spot provider class for providers that request data via HTTP(S). 
Just for convenience to avoid code # duplication. Subclasses of this query the individual APIs for data. -class HTTPProvider(Provider): +class HTTPSpotProvider(SpotProvider): def __init__(self, provider_config, url, poll_interval): super().__init__(provider_config) @@ -23,7 +23,7 @@ class HTTPProvider(Provider): # Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and # the application can continue starting. The thread itself will then die, and the timer will kick in on its own # thread. - logging.info("Set up query of " + self.name + " API every " + str(self.poll_interval) + " seconds.") + logging.info("Set up query of " + self.name + " spot API every " + str(self.poll_interval) + " seconds.") thread = Thread(target=self.poll) thread.daemon = True thread.start() @@ -34,7 +34,7 @@ class HTTPProvider(Provider): def poll(self): try: # Request data from API - logging.debug("Polling " + self.name + " API...") + logging.debug("Polling " + self.name + " spot API...") http_response = requests.get(self.url, headers=self.HTTP_HEADERS) # Pass off to the subclass for processing new_spots = self.http_response_to_spots(http_response) @@ -44,11 +44,11 @@ class HTTPProvider(Provider): self.status = "OK" self.last_update_time = datetime.now(pytz.UTC) - logging.debug("Received data from " + self.name + " API.") + logging.debug("Received data from " + self.name + " spot API.") except Exception as e: self.status = "Error" - logging.exception("Exception in HTTP JSON Provider (" + self.name + ")") + logging.exception("Exception in HTTP JSON Spot Provider (" + self.name + ")") sleep(1) self.poll_timer = Timer(self.poll_interval, self.poll) diff --git a/providers/parksnpeaks.py b/spotproviders/parksnpeaks.py similarity index 92% rename from providers/parksnpeaks.py rename to spotproviders/parksnpeaks.py index 471f9d9..88693fb 100644 --- a/providers/parksnpeaks.py +++ b/spotproviders/parksnpeaks.py @@ -4,11 +4,11 @@ from datetime import 
datetime import pytz from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for Parks n Peaks -class ParksNPeaks(HTTPProvider): +# Spot provider for Parks n Peaks +class ParksNPeaks(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://www.parksnpeaks.org/api/ALL" diff --git a/providers/pota.py b/spotproviders/pota.py similarity index 91% rename from providers/pota.py rename to spotproviders/pota.py index cd51748..5a59b95 100644 --- a/providers/pota.py +++ b/spotproviders/pota.py @@ -3,11 +3,11 @@ from datetime import datetime import pytz from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for Parks on the Air -class POTA(HTTPProvider): +# Spot provider for Parks on the Air +class POTA(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://api.pota.app/spot/activator" diff --git a/providers/rbn.py b/spotproviders/rbn.py similarity index 93% rename from providers/rbn.py rename to spotproviders/rbn.py index 25f1406..c42224a 100644 --- a/providers/rbn.py +++ b/spotproviders/rbn.py @@ -9,12 +9,12 @@ import telnetlib3 from data.spot import Spot from core.config import SERVER_OWNER_CALLSIGN -from providers.provider import Provider +from spotproviders.spot_provider import SpotProvider -# Provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8 +# Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8 # (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor. 
-class RBN(Provider): +class RBN(SpotProvider): CALLSIGN_PATTERN = "([a-z|0-9|/]+)" FREQUENCY_PATTERM = "([0-9|.]+)" LINE_PATTERN = re.compile( diff --git a/providers/sota.py b/spotproviders/sota.py similarity index 95% rename from providers/sota.py rename to spotproviders/sota.py index f301095..6b5b674 100644 --- a/providers/sota.py +++ b/spotproviders/sota.py @@ -4,11 +4,11 @@ import requests from requests_cache import CachedSession from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for Summits on the Air -class SOTA(HTTPProvider): +# Spot provider for Summits on the Air +class SOTA(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 # SOTA wants us to check for an "epoch" from the API and see if it's actually changed before querying the main data # APIs. So it's actually the EPOCH_URL that we pass into the constructor and get the superclass to call on a timer. diff --git a/providers/provider.py b/spotproviders/spot_provider.py similarity index 87% rename from providers/provider.py rename to spotproviders/spot_provider.py index 3ecb8f6..7e0ab5e 100644 --- a/providers/provider.py +++ b/spotproviders/spot_provider.py @@ -3,13 +3,13 @@ from datetime import datetime import pytz from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION -from core.config import config, SERVER_OWNER_CALLSIGN, MAX_SPOT_AGE +from core.config import SERVER_OWNER_CALLSIGN, MAX_SPOT_AGE -# Generic data provider class. Subclasses of this query the individual APIs for data. -class Provider: +# Generic spot provider class. Subclasses of this query the individual APIs for data. 
+class SpotProvider: - # HTTP headers used for providers that use HTTP + # HTTP headers used for spot providers that use HTTP HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" } # Constructor @@ -39,7 +39,7 @@ class Provider: # Fill in any blanks spot.infer_missing() # Add to the list - self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE) + self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC) # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots @@ -49,7 +49,7 @@ class Provider: # Fill in any blanks spot.infer_missing() # Add to the list - self.spots.add(spot.guid, spot, expire=MAX_SPOT_AGE) + self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC) # Stop any threads and prepare for application shutdown diff --git a/providers/wwbota.py b/spotproviders/wwbota.py similarity index 92% rename from providers/wwbota.py rename to spotproviders/wwbota.py index d52c151..81c7bcf 100644 --- a/providers/wwbota.py +++ b/spotproviders/wwbota.py @@ -1,11 +1,11 @@ from datetime import datetime from data.spot import Spot -from providers.http_provider import HTTPProvider +from spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for Worldwide Bunkers on the Air -class WWBOTA(HTTPProvider): +# Spot provider for Worldwide Bunkers on the Air +class WWBOTA(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://api.wwbota.org/spots/" diff --git a/providers/wwff.py b/spotproviders/wwff.py similarity index 90% rename from providers/wwff.py rename to spotproviders/wwff.py index acaa01f..32e01cf 100644 --- a/providers/wwff.py +++ b/spotproviders/wwff.py @@ -3,11 +3,11 @@ from datetime import datetime import pytz from data.spot import Spot -from providers.http_provider import HTTPProvider +from 
spotproviders.http_spot_provider import HTTPSpotProvider -# Provider for Worldwide Flora & Fauna -class WWFF(HTTPProvider): +# Spot provider for Worldwide Flora & Fauna +class WWFF(HTTPSpotProvider): POLL_INTERVAL_SEC = 120 SPOTS_URL = "https://spots.wwff.co/static/spots.json" diff --git a/views/webpage_alerts.tpl b/views/webpage_alerts.tpl new file mode 100644 index 0000000..ce83ea3 --- /dev/null +++ b/views/webpage_alerts.tpl @@ -0,0 +1,44 @@ +% rebase('webpage_base.tpl') + +
Loading...
++ +
+Alerts to view: + +
+