Mirror of https://git.ianrenton.com/ian/spothole.git, synced 2026-03-15 12:24:29 +00:00

Compare commits: 068c732796...main (7 commits)
| SHA1 |
|---|
| b0a7e4ea81 |
| b6407b4f66 |
| 30c6222fa0 |
| 07b7ce49da |
| 3792e9f4d9 |
| 6982354364 |
| 6b18ec6f88 |
````diff
@@ -254,10 +254,11 @@ server {
     }
 
     location / {
-        add_header Access-Control-Allow-Origin $xssorigin;
         proxy_http_version 1.1;
         proxy_set_header Connection "";
        proxy_pass http://127.0.0.1:8080;
+        proxy_hide_header Access-Control-Allow-Origin;
+        add_header Access-Control-Allow-Origin $xssorigin;
     }
 }
 ```
````
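The reordering matters because nginx's `add_header` would otherwise be emitted alongside any `Access-Control-Allow-Origin` header the backend already sets, and browsers reject a duplicated CORS header; `proxy_hide_header` strips the upstream copy first. A quick external check (a sketch, not part of the repository; the endpoint path and origin are placeholders):

```python
import requests

# Pretend to be a cross-origin web page asking for API data.
resp = requests.get("http://127.0.0.1:8080/api/spots",
                    headers={"Origin": "https://example.com"})

# With proxy_hide_header in place this is a single value set by nginx,
# never a comma-joined duplicate merged from the backend's own header.
print(resp.headers.get("Access-Control-Allow-Origin"))
```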
```diff
@@ -266,9 +267,9 @@ One further change you might want to make to the file above is the `add_header A
 my own Spothole server to make sure that other third-party web-based software can get the data from my instance, and applies to any endpoint underneath `/api`. If you want
 *your* Spothole instance to be set up the same way, so that others can write software in JavaScript that can access it,
 leave this intact. But if you want your Spothole instance to only be usable by scripts running on the web server you write,
-you can remove this block. (Note that this doesn't stop other people writing *non-web-based* software that accesses your
+you can remove this line. (Note that this doesn't stop other people writing *non-web-based* software that accesses your
 Spothole API—the enforcement of cross-origin headers only happens within the user's browser. If you need to lock your
-instance down so that no-one else can access it with *any* software, that's an aspect of nginx config that you will need
+instance down so that no-one else can access it with *any* software, that's an aspect of nginx or firewall config that you will need
 to find help with elsewhere.)
 
 Now, make a symbolic link to enable the site:
```
```diff
@@ -5,46 +5,51 @@ import pytz
 from core.config import MAX_ALERT_AGE
 
 
-# Generic alert provider class. Subclasses of this query the individual APIs for alerts.
 class AlertProvider:
+    """Generic alert provider class. Subclasses of this query the individual APIs for alerts."""
 
-    # Constructor
     def __init__(self, provider_config):
+        """Constructor"""
+
         self.name = provider_config["name"]
         self.enabled = provider_config["enabled"]
         self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Not Started" if self.enabled else "Disabled"
-        self.alerts = None
-        self.web_server = None
+        self._alerts = None
+        self._web_server = None
 
-    # Set up the provider, e.g. giving it the alert list to work from
     def setup(self, alerts, web_server):
-        self.alerts = alerts
-        self.web_server = web_server
+        """Set up the provider, e.g. giving it the alert list to work from"""
+
+        self._alerts = alerts
+        self._web_server = web_server
 
-    # Start the provider. This should return immediately after spawning threads to access the remote resources
     def start(self):
+        """Start the provider. This should return immediately after spawning threads to access the remote resources"""
+
         raise NotImplementedError("Subclasses must implement this method")
 
-    # Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots,
-    # because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching
-    # to deal with duplicates.
-    def submit_batch(self, alerts):
+    def _submit_batch(self, alerts):
+        """Submit a batch of alerts retrieved from the provider. There is no timestamp checking like there is for spots,
+        because alerts could be created at any point for any time in the future. Rely on hashcode-based id matching
+        to deal with duplicates."""
+
         # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when alerts are fired
         # off to SSE listeners.
-        alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0))
+        alerts = sorted(alerts, key=lambda a: (a.start_time if a and a.start_time else 0))
         for alert in alerts:
             # Fill in any blanks and add to the list
             alert.infer_missing()
-            self.add_alert(alert)
+            self._add_alert(alert)
 
-    def add_alert(self, alert):
+    def _add_alert(self, alert):
         if not alert.expired():
-            self.alerts.add(alert.id, alert, expire=MAX_ALERT_AGE)
+            self._alerts.add(alert.id, alert, expire=MAX_ALERT_AGE)
             # Ping the web server in case we have any SSE connections that need to see this immediately
-            if self.web_server:
-                self.web_server.notify_new_alert(alert)
+            if self._web_server:
+                self._web_server.notify_new_alert(alert)
 
-    # Stop any threads and prepare for application shutdown
     def stop(self):
-        raise NotImplementedError("Subclasses must implement this method")
+        """Stop any threads and prepare for application shutdown"""
+
+        raise NotImplementedError("Subclasses must implement this method")
```
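The net effect of this commit is that `submit_batch` and `add_alert` become private (`_submit_batch`, `_add_alert`), leaving `setup()`, `start()` and `stop()` as the public surface. A minimal illustrative subclass (a sketch, not code from the repo) showing the contract a provider now has to honour:

```python
from threading import Thread

from alertproviders.alert_provider import AlertProvider


class DummyProvider(AlertProvider):
    """Illustrative only: fetches nothing, but shows the lifecycle."""

    def start(self):
        # Must return immediately; real work happens on a daemon thread.
        Thread(target=self._fetch, daemon=True).start()

    def stop(self):
        pass  # nothing to interrupt in this sketch

    def _fetch(self):
        alerts = []  # ...query a remote API and build Alert objects here...
        # Inherited helper: sorts by start time, infers missing fields,
        # de-duplicates by id and notifies SSE listeners.
        self._submit_batch(alerts)
```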
```diff
@@ -8,15 +8,16 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Beaches on the Air
 class BOTA(HTTPAlertProvider):
+    """Alert provider for Beaches on the Air"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://www.beachesontheair.com/"
 
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         # Find the table of upcoming alerts
         bs = BeautifulSoup(http_response.content.decode(), features="lxml")
```
```diff
@@ -33,7 +34,7 @@ class BOTA(HTTPAlertProvider):
 
             # Get the date, dealing with the fact we get no year so have to figure out if it's last year or next year
             date_text = str(cells[2].find('span').contents[0]).strip()
-            date_time = datetime.strptime(date_text,"%d %b - %H:%M UTC").replace(tzinfo=pytz.UTC)
+            date_time = datetime.strptime(date_text, "%d %b - %H:%M UTC").replace(tzinfo=pytz.UTC)
             date_time = date_time.replace(year=datetime.now(pytz.UTC).year)
             # If this was more than a day ago, activation is actually next year
             if date_time < datetime.now(pytz.UTC) - timedelta(days=1):
```
```diff
@@ -9,20 +9,21 @@ from alertproviders.alert_provider import AlertProvider
 from core.constants import HTTP_HEADERS
 
 
-# Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code
-# duplication. Subclasses of this query the individual APIs for data.
 class HTTPAlertProvider(AlertProvider):
+    """Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code
+    duplication. Subclasses of this query the individual APIs for data."""
 
     def __init__(self, provider_config, url, poll_interval):
         super().__init__(provider_config)
-        self.url = url
-        self.poll_interval = poll_interval
+        self._url = url
+        self._poll_interval = poll_interval
         self._thread = None
         self._stop_event = Event()
 
     def start(self):
         # Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between
         # subsequent polls, so start() returns immediately and the application can continue starting.
-        logging.info("Set up query of " + self.name + " alert API every " + str(self.poll_interval) + " seconds.")
+        logging.info("Set up query of " + self.name + " alert API every " + str(self._poll_interval) + " seconds.")
         self._thread = Thread(target=self._run, daemon=True)
         self._thread.start()
```
```diff
@@ -32,32 +33,33 @@ class HTTPAlertProvider(AlertProvider):
     def _run(self):
         while True:
             self._poll()
-            if self._stop_event.wait(timeout=self.poll_interval):
+            if self._stop_event.wait(timeout=self._poll_interval):
                 break
 
     def _poll(self):
         try:
             # Request data from API
             logging.debug("Polling " + self.name + " alert API...")
-            http_response = requests.get(self.url, headers=HTTP_HEADERS)
+            http_response = requests.get(self._url, headers=HTTP_HEADERS)
             # Pass off to the subclass for processing
-            new_alerts = self.http_response_to_alerts(http_response)
+            new_alerts = self._http_response_to_alerts(http_response)
             # Submit the new alerts for processing. There might not be any alerts for the less popular programs.
             if new_alerts:
-                self.submit_batch(new_alerts)
+                self._submit_batch(new_alerts)
 
             self.status = "OK"
             self.last_update_time = datetime.now(pytz.UTC)
             logging.debug("Received data from " + self.name + " alert API.")
 
-        except Exception as e:
+        except Exception:
             self.status = "Error"
             logging.exception("Exception in HTTP JSON Alert Provider (" + self.name + ")")
             # Brief pause on error before the next poll, but still respond promptly to stop()
             self._stop_event.wait(timeout=1)
 
-    # Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass
-    # implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
-    # the API actually provides.
-    def http_response_to_alerts(self, http_response):
-        raise NotImplementedError("Subclasses must implement this method")
+    def _http_response_to_alerts(self, http_response):
+        """Convert an HTTP response returned by the API into alert data. The whole response is provided here so the subclass
+        implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
+        the API actually provides."""
+
+        raise NotImplementedError("Subclasses must implement this method")
```
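The loop's use of `Event.wait(timeout=...)` instead of `time.sleep()` is what lets `stop()` interrupt a 30-minute nap immediately: `wait()` returns `True` the moment the event is set and `False` on timeout. The pattern in isolation (a self-contained sketch, not repo code):

```python
from threading import Event, Thread

stop_event = Event()

def run():
    while True:
        print("poll")                      # one unit of work per cycle
        if stop_event.wait(timeout=1800):  # True as soon as stop_event.set() is called
            break                          # prompt shutdown, no half-hour delay

worker = Thread(target=run, daemon=True)
worker.start()
stop_event.set()   # the worker leaves wait() almost immediately and exits
worker.join()
```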
```diff
@@ -8,8 +8,9 @@ from alertproviders.http_alert_provider import HTTPAlertProvider
 from data.alert import Alert
 
 
-# Alert provider NG3K DXpedition list
 class NG3K(HTTPAlertProvider):
+    """Alert provider NG3K DXpedition list"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://www.ng3k.com/adxo.xml"
     AS_CALL_PATTERN = re.compile("as ([a-z0-9/]+)", re.IGNORECASE)
```
```diff
@@ -17,7 +18,7 @@ class NG3K(HTTPAlertProvider):
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         rss = RSSParser.parse(http_response.content.decode())
         # Iterate through source data
```
```diff
@@ -48,7 +49,8 @@ class NG3K(HTTPAlertProvider):
 
             start_timestamp = datetime.strptime(start_year + " " + start_mon + " " + start_day, "%Y %b %d").replace(
                 tzinfo=pytz.UTC).timestamp()
-            end_timestamp = datetime.strptime(end_year + " " + end_mon + " " + end_day + " 23:59", "%Y %b %d %H:%M").replace(
+            end_timestamp = datetime.strptime(end_year + " " + end_mon + " " + end_day + " 23:59",
+                                              "%Y %b %d %H:%M").replace(
                 tzinfo=pytz.UTC).timestamp()
 
             # Sometimes the DX callsign is "real", sometimes you just get a prefix with the real working callsigns being
```
```diff
@@ -62,7 +64,7 @@ class NG3K(HTTPAlertProvider):
                 dx_calls = [parts[2].upper()]
 
             # "Calls" of TBA, TBC or TBD are not real attempts at Turkish callsigns
-            dx_calls = list(filter(lambda a: a != "TBA" and a != "TBC" and a != "TBD" , dx_calls))
+            dx_calls = list(filter(lambda a: a != "TBA" and a != "TBC" and a != "TBD", dx_calls))
 
             dx_country = parts[1]
             qsl_info = parts[3]
```
```diff
@@ -8,15 +8,16 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Parks n Peaks
 class ParksNPeaks(HTTPAlertProvider):
+    """Alert provider for Parks n Peaks"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "http://parksnpeaks.org/api/ALERTS/"
 
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         # Iterate through source data
         for source_alert in http_response.json():
```
```diff
@@ -44,7 +45,7 @@ class ParksNPeaks(HTTPAlertProvider):
 
             # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
             if sig and sig not in ["POTA", "SOTA", "WWFF", "SiOTA", "ZLOTA", "KRMNPA"]:
-                logging.warn("PNP alert found with sig " + sig + ", developer needs to add support for this!")
+                logging.warning("PNP alert found with sig " + sig + ", developer needs to add support for this!")
 
             # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. Otherwise, add to
             # the alert list. Note that while ZLOTA has its own spots API, it doesn't have its own alerts API. So that
```
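Background on this one-line change: `logging.warn()` has been a deprecated alias of `logging.warning()` since Python 3.3, so this is a pure rename with identical output. The same deprecated alias still appears in `LookupHelper.infer_mode_type_from_mode` in a later hunk, which this set of commits leaves untouched.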
```diff
@@ -7,15 +7,16 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Parks on the Air
 class POTA(HTTPAlertProvider):
+    """Alert provider for Parks on the Air"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://api.pota.app/activation"
 
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         # Iterate through source data
         for source_alert in http_response.json():
```
```diff
@@ -25,7 +26,8 @@ class POTA(HTTPAlertProvider):
                 dx_calls=[source_alert["activator"].upper()],
                 freqs_modes=source_alert["frequencies"],
                 comment=source_alert["comments"],
-                sig_refs=[SIGRef(id=source_alert["reference"], sig="POTA", name=source_alert["name"], url="https://pota.app/#/park/" + source_alert["reference"])],
+                sig_refs=[SIGRef(id=source_alert["reference"], sig="POTA", name=source_alert["name"],
+                                 url="https://pota.app/#/park/" + source_alert["reference"])],
                 start_time=datetime.strptime(source_alert["startDate"] + source_alert["startTime"],
                                              "%Y-%m-%d%H:%M").replace(tzinfo=pytz.UTC).timestamp(),
                 end_time=datetime.strptime(source_alert["endDate"] + source_alert["endTime"],
```
```diff
@@ -7,15 +7,16 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Summits on the Air
 class SOTA(HTTPAlertProvider):
+    """Alert provider for Summits on the Air"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://api-db2.sota.org.uk/api/alerts/365/all/all"
 
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         # Iterate through source data
         for source_alert in http_response.json():
```
```diff
@@ -31,7 +32,9 @@ class SOTA(HTTPAlertProvider):
                 dx_names=[source_alert["activatorName"].upper()],
                 freqs_modes=source_alert["frequency"],
                 comment=source_alert["comments"],
-                sig_refs=[SIGRef(id=source_alert["associationCode"] + "/" + source_alert["summitCode"], sig="SOTA", name=summit_name, activation_score=summit_points)],
+                sig_refs=[
+                    SIGRef(id=source_alert["associationCode"] + "/" + source_alert["summitCode"], sig="SOTA",
+                           name=summit_name, activation_score=summit_points)],
                 start_time=datetime.strptime(source_alert["dateActivated"],
                                              "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.UTC).timestamp(),
                 is_dxpedition=False)
```
```diff
@@ -8,8 +8,9 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Wainwrights on the Air
 class WOTA(HTTPAlertProvider):
+    """Alert provider for Wainwrights on the Air"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://www.wota.org.uk/alerts_rss.php"
     RSS_DATE_TIME_FORMAT = "%a, %d %b %Y %H:%M:%S %z"
```
```diff
@@ -17,7 +18,7 @@ class WOTA(HTTPAlertProvider):
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         rss = RSSParser.parse(http_response.content.decode())
         # Iterate through source data
```
```diff
@@ -7,15 +7,16 @@ from data.alert import Alert
 from data.sig_ref import SIGRef
 
 
-# Alert provider for Worldwide Flora and Fauna
 class WWFF(HTTPAlertProvider):
+    """Alert provider for Worldwide Flora and Fauna"""
+
     POLL_INTERVAL_SEC = 1800
     ALERTS_URL = "https://spots.wwff.co/static/agendas.json"
 
     def __init__(self, provider_config):
         super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
 
-    def http_response_to_alerts(self, http_response):
+    def _http_response_to_alerts(self, http_response):
         new_alerts = []
         # Iterate through source data
         for source_alert in http_response.json():
```
```diff
@@ -6,6 +6,9 @@
 # this as "N0CALL" and it shouldn't do any harm, as we're not sending anything to the various networks, only receiving.
 server-owner-callsign: "N0CALL"
 
+# The base URL at which the software runs.
+base-url: "http://localhost:8080"
+
 # Spot providers to use. This is an example set, tailor it to your liking by commenting and uncommenting.
 # RBN and APRS-IS are supported but have such a high data rate, you probably don't want them enabled.
 # Each provider needs a class, a name, and an enabled/disabled state. Some require more config such as hostnames/IP
```
```diff
@@ -7,4 +7,4 @@ from requests_cache import CachedSession
 # of time has passed. This is used throughout Spothole to cache data that does not change
 # rapidly.
 SEMI_STATIC_URL_DATA_CACHE = CachedSession("cache/semi_static_url_data_cache",
-                                          expire_after=timedelta(days=30))
+                                           expire_after=timedelta(days=30))
```
```diff
@@ -1,70 +1,73 @@
 import logging
 from datetime import datetime
-from threading import Timer, Event, Thread
-from time import sleep
+from threading import Event, Thread
 
 import pytz
 
 
-# Provides a timed cleanup of the spot list.
 class CleanupTimer:
+    """Provides a timed cleanup of the spot list."""
 
-    # Constructor
     def __init__(self, spots, alerts, web_server, cleanup_interval):
-        self.spots = spots
-        self.alerts = alerts
-        self.web_server = web_server
-        self.cleanup_interval = cleanup_interval
-        self.cleanup_timer = None
+        """Constructor"""
+
+        self._spots = spots
+        self._alerts = alerts
+        self._web_server = web_server
+        self._cleanup_interval = cleanup_interval
         self.last_cleanup_time = datetime.min.replace(tzinfo=pytz.UTC)
         self.status = "Starting"
+        self._thread = None
+        self._stop_event = Event()
 
-    # Start the cleanup timer
     def start(self):
+        """Start the cleanup timer"""
+
         self._thread = Thread(target=self._run, daemon=True)
         self._thread.start()
 
-    # Stop any threads and prepare for application shutdown
     def stop(self):
+        """Stop any threads and prepare for application shutdown"""
+
         self._stop_event.set()
 
     def _run(self):
-        while not self._stop_event.wait(timeout=self.cleanup_interval):
+        while not self._stop_event.wait(timeout=self._cleanup_interval):
             self._cleanup()
 
-    # Perform cleanup and reschedule next timer
     def _cleanup(self):
+        """Perform cleanup and reschedule next timer"""
+
         try:
             # Perform cleanup via letting the data expire
-            self.spots.expire()
-            self.alerts.expire()
+            self._spots.expire()
+            self._alerts.expire()
 
             # Explicitly clean up any spots and alerts that have expired
-            for id in list(self.spots.iterkeys()):
+            for i in list(self._spots.iterkeys()):
                 try:
-                    spot = self.spots[id]
+                    spot = self._spots[i]
                     if spot.expired():
-                        self.spots.delete(id)
+                        self._spots.delete(i)
                 except KeyError:
                     # Must have already been deleted, OK with that
                     pass
-            for id in list(self.alerts.iterkeys()):
+            for i in list(self._alerts.iterkeys()):
                 try:
-                    alert = self.alerts[id]
+                    alert = self._alerts[i]
                     if alert.expired():
-                        self.alerts.delete(id)
+                        self._alerts.delete(i)
                 except KeyError:
                     # Must have already been deleted, OK with that
                     pass
 
             # Clean up web server SSE spot/alert queues
-            self.web_server.clean_up_sse_queues()
+            self._web_server.clean_up_sse_queues()
 
             self.status = "OK"
             self.last_cleanup_time = datetime.now(pytz.UTC)
 
-        except Exception as e:
+        except Exception:
             self.status = "Error"
             logging.exception("Exception in Cleanup thread")
+            self._stop_event.wait(timeout=1)
```
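The stores behind `_spots` and `_alerts` expose `.add(key, value, expire=...)`, `.expire()`, `.iterkeys()` and `.delete(key)`. That matches the `diskcache.Cache` API (an assumption; the imports are not visible in this hunk). A minimal sketch of the expiry behaviour the cleanup pass relies on:

```python
import time

from diskcache import Cache  # assumption: the library backing the spot/alert stores

cache = Cache("/tmp/spothole_sketch")
cache.add("alert-1", {"dx": "N0CALL"}, expire=1)  # entry is valid for one second
time.sleep(2)
cache.expire()                  # evict everything whose expiry time has passed
print(list(cache.iterkeys()))   # -> []
```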
```diff
@@ -10,9 +10,11 @@ if not os.path.isfile("config.yml"):
     exit()
 
 # Load config
-config = yaml.safe_load(open("config.yml"))
+with open("config.yml") as f:
+    config = yaml.safe_load(f)
 logging.info("Loaded config.")
 
+BASE_URL = config["base-url"]
 MAX_SPOT_AGE = config["max-spot-age-sec"]
 MAX_ALERT_AGE = config["max-alert-age-sec"]
 SERVER_OWNER_CALLSIGN = config["server-owner-callsign"]
```
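Two things change here: the `with` block closes config.yml deterministically instead of leaving the file object for the garbage collector (which shows up as a ResourceWarning when warnings are enabled), and the new `BASE_URL` constant reads the `base-url` key that the config.yml hunk above introduces.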
```diff
@@ -23,7 +25,7 @@ WEB_UI_OPTIONS = config["web-ui-options"]
 # For ease of config, each spot provider owns its own config about whether it should be enabled by default in the web UI
 # but for consistency we provide this to the front-end in web-ui-options because it has no impact outside of the web UI.
 WEB_UI_OPTIONS["spot-providers-enabled-by-default"] = [p["name"] for p in config["spot-providers"] if p["enabled"] and (
-        "enabled-by-default-in-web-ui" not in p or p["enabled-by-default-in-web-ui"] == True)]
+        "enabled-by-default-in-web-ui" not in p or p["enabled-by-default-in-web-ui"])]
 # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from
 # one of our proviers. We set that to also be enabled by default.
 if ALLOW_SPOTTING:
```
```diff
@@ -12,27 +12,27 @@ HAMQTH_PRG = (SOFTWARE_NAME + " v" + SOFTWARE_VERSION + " operated by " + SERVER
 
 # Special Interest Groups
 SIGS = [
     SIG(name="POTA", description="Parks on the Air", ref_regex=r"[A-Z]{2}\-\d{4,5}"),
     SIG(name="SOTA", description="Summits on the Air", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"),
     SIG(name="WWFF", description="World Wide Flora & Fauna", ref_regex=r"[A-Z0-9]{1,3}FF\-\d{4}"),
     SIG(name="GMA", description="Global Mountain Activity", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{2}\-\d{3}"),
     SIG(name="WWBOTA", description="Worldwide Bunkers on the Air", ref_regex=r"B\/[A-Z0-9]{1,3}\-\d{3,4}"),
     SIG(name="HEMA", description="HuMPs Excluding Marilyns Award", ref_regex=r"[A-Z0-9]{1,3}\/[A-Z]{3}\-\d{3}"),
     SIG(name="IOTA", description="Islands on the Air", ref_regex=r"[A-Z]{2}\-\d{3}"),
-    SIG(name="MOTA", description="Mills on the Air", ref_regex=r"X\d{4-6}"),
+    SIG(name="MOTA", description="Mills on the Air", ref_regex=r"X\d{4,6}"),
     SIG(name="ARLHS", description="Amateur Radio Lighthouse Society", ref_regex=r"[A-Z]{3}\-\d{3,4}"),
     SIG(name="ILLW", description="International Lighthouse & Lightship Weekend", ref_regex=r"[A-Z]{2}\d{4}"),
     SIG(name="SIOTA", description="Silos on the Air", ref_regex=r"[A-Z]{2}\-[A-Z]{3}\d"),
     SIG(name="WCA", description="World Castles Award", ref_regex=r"[A-Z0-9]{1,3}\-\d{5}"),
     SIG(name="ZLOTA", description="New Zealand on the Air", ref_regex=r"ZL[A-Z]/[A-Z]{2}\-\d{3,4}"),
     SIG(name="WOTA", description="Wainwrights on the Air", ref_regex=r"[A-Z]{3}-[0-9]{2}"),
     SIG(name="BOTA", description="Beaches on the Air"),
     SIG(name="KRMNPA", description="Keith Roget Memorial National Parks Award"),
     SIG(name="LLOTA", description="Lagos y Lagunas on the Air", ref_regex=r"[A-Z]{2}\-\d{4}"),
     SIG(name="WWTOTA", description="Towers on the Air", ref_regex=r"[A-Z]{2}R\-\d{4}"),
     SIG(name="WAB", description="Worked All Britain", ref_regex=r"[A-Z]{1,2}[0-9]{2}"),
     SIG(name="WAI", description="Worked All Ireland", ref_regex=r"[A-Z][0-9]{2}"),
     SIG(name="TOTA", description="Toilets on the Air", ref_regex=r"T\-[0-9]{2}")
 ]
 
 # Modes. Note "DIGI" and "DIGITAL" are also supported but are normalised into "DATA".
```
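The MOTA change is a real bug fix, not a style tweak: in Python's `re` syntax `{4-6}` is not a valid quantifier, so the old pattern treated it as the literal text `{4-6}` and could never match a reference. A quick demonstration:

```python
import re

print(re.fullmatch(r"X\d{4-6}", "X12345"))  # None: "{4-6}" is matched as literal characters
print(re.fullmatch(r"X\d{4,6}", "X12345"))  # <re.Match ...>: the corrected quantifier works
```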
```diff
@@ -18,8 +18,10 @@ for idx in cq_zone_data.index:
 for idx in itu_zone_data.index:
     prepare(itu_zone_data.at[idx, 'geometry'])
 
-# Finds out which CQ zone a lat/lon point is in.
+
 def lat_lon_to_cq_zone(lat, lon):
+    """Finds out which CQ zone a lat/lon point is in."""
+
     lon = ((lon + 180) % 360) - 180
     for index, row in cq_zone_data.iterrows():
         polygon = Polygon(row["geometry"])
```
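The first statement of each zone lookup folds any longitude into the [-180, 180) range, so points given as e.g. 190 degrees east still land inside the zone polygons. Worked examples:

```python
for lon in (190, -200, 170):
    print(lon, "->", ((lon + 180) % 360) - 180)
# 190 -> -170   (190 degrees east is the same meridian as 170 degrees west)
# -200 -> 160
# 170 -> 170    (values already in range pass through unchanged)
```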
```diff
@@ -38,8 +40,9 @@ def lat_lon_to_cq_zone(lat, lon):
     return None
 
 
-# Finds out which ITU zone a lat/lon point is in.
 def lat_lon_to_itu_zone(lat, lon):
+    """Finds out which ITU zone a lat/lon point is in."""
+
     lon = ((lon + 180) % 360) - 180
     for index, row in itu_zone_data.iterrows():
         polygon = Polygon(row["geometry"])
```
```diff
@@ -58,9 +61,10 @@ def lat_lon_to_itu_zone(lat, lon):
     return None
 
 
-# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the centre point of the square.
-# Returns None if the grid format is invalid.
 def lat_lon_for_grid_centre(grid):
+    """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the centre point of the square.
+    Returns None if the grid format is invalid."""
+
     lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid)
     if lat is not None and lon is not None and lat_cell_size is not None and lon_cell_size is not None:
         return [lat + lat_cell_size / 2.0, lon + lon_cell_size / 2.0]
```
```diff
@@ -68,18 +72,21 @@ def lat_lon_for_grid_centre(grid):
     return None
 
 
-# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the southwest corner of the square.
-# Returns None if the grid format is invalid.
 def lat_lon_for_grid_sw_corner(grid):
+    """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the southwest corner of the square.
+    Returns None if the grid format is invalid."""
+
     lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid)
     if lat is not None and lon is not None:
         return [lat, lon]
     else:
         return None
 
-# Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the northeast corner of the square.
-# Returns None if the grid format is invalid.
+
 def lat_lon_for_grid_ne_corner(grid):
+    """Convert a Maidenhead grid reference of arbitrary precision to the lat/long of the northeast corner of the square.
+    Returns None if the grid format is invalid."""
+
     lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid)
     if lat is not None and lon is not None and lat_cell_size is not None and lon_cell_size is not None:
         return [lat + lat_cell_size, lon + lon_cell_size]
```
```diff
@@ -87,18 +94,19 @@ def lat_lon_for_grid_ne_corner(grid):
     return None
 
 
-# Convert a Maidenhead grid reference of arbitrary precision to lat/long, including in the result the size of the
-# lowest grid square. This is a utility method used by the main methods that return the centre, southwest, and
-# northeast coordinates of a grid square.
-# The return type is always a tuple of size 4. The elements in it are None if the grid format is invalid.
 def lat_lon_for_grid_sw_corner_plus_size(grid):
+    """Convert a Maidenhead grid reference of arbitrary precision to lat/long, including in the result the size of the
+    lowest grid square. This is a utility method used by the main methods that return the centre, southwest, and
+    northeast coordinates of a grid square.
+    The return type is always a tuple of size 4. The elements in it are None if the grid format is invalid."""
+
     # Make sure we are in upper case so our maths works. Case is arbitrary for Maidenhead references
     grid = grid.upper()
 
     # Return None if our Maidenhead string is invalid or too short
     length = len(grid)
     if length <= 0 or (length % 2) != 0:
-        return (None, None, None, None)
+        return None, None, None, None
 
     lat = 0.0  # aggregated latitude
     lon = 0.0  # aggregated longitude
```
```diff
@@ -116,17 +124,17 @@ def lat_lon_for_grid_sw_corner_plus_size(grid):
             # A-X (0-23) thereafter.
             max_cell_no = 17 if block == 0 else 23
             if lat_cell_no < 0 or lat_cell_no > max_cell_no or lon_cell_no < 0 or lon_cell_no > max_cell_no:
-                return (None, None, None, None)
+                return None, None, None, None
         else:
             # Numbers in this block
             try:
                 lon_cell_no = int(grid[block * 2])
                 lat_cell_no = int(grid[block * 2 + 1])
             except ValueError:
-                return (None, None, None, None)
+                return None, None, None, None
             # Bail if the values aren't in range 0-9
             if lat_cell_no < 0 or lat_cell_no > 9 or lon_cell_no < 0 or lon_cell_no > 9:
-                return (None, None, None, None)
+                return None, None, None, None
 
         # Aggregate the angles
         lat += lat_cell_no * lat_cell_size
```
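A worked example of the block arithmetic above (a standalone sketch that mirrors the algorithm, not code imported from the repo). For the four-character grid "IO90", the letter pair selects a 20 by 10 degree field counted from (-180, -90) and the digit pair selects a 2 by 1 degree square inside it:

```python
grid = "IO90"
lon = -180 + (ord(grid[0]) - ord("A")) * 20 + int(grid[2]) * 2  # I = 8 -> -20, plus digit 9 -> -2
lat = -90 + (ord(grid[1]) - ord("A")) * 10 + int(grid[3]) * 1   # O = 14 -> 50, plus digit 0 -> 50
print(lat, lon)              # southwest corner: 50 -2
print(lat + 0.5, lon + 1.0)  # centre of the 2x1 degree square: 50.5 -1.0
```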
```diff
@@ -157,8 +165,9 @@ def lat_lon_for_grid_sw_corner_plus_size(grid):
     return lat, lon, lat_cell_size, lon_cell_size
 
 
-# Convert a Worked All Britain or Worked All Ireland reference to a lat/lon point.
 def wab_wai_square_to_lat_lon(ref):
+    """Convert a Worked All Britain or Worked All Ireland reference to a lat/lon point."""
+
     # First check we have a valid grid square, and based on what it looks like, use either the Ordnance Survey, Irish,
     # or UTM grid systems to perform the conversion.
     if re.match(r"^[HNOST][ABCDEFGHJKLMNOPQRSTUVWXYZ][0-9]{2}$", ref):
```
```diff
@@ -172,8 +181,9 @@ def wab_wai_square_to_lat_lon(ref):
     return None
 
 
-# Get a lat/lon point for the centre of an Ordnance Survey grid square
 def os_grid_square_to_lat_lon(ref):
+    """Get a lat/lon point for the centre of an Ordnance Survey grid square"""
+
     # Convert the letters into multipliers for the 500km squares and 100km squares
     offset_500km_multiplier = ord(ref[0]) - 65
     offset_100km_multiplier = ord(ref[1]) - 65
```
```diff
@@ -202,8 +212,9 @@ def os_grid_square_to_lat_lon(ref):
     return lat, lon
 
 
-# Get a lat/lon point for the centre of an Irish Grid square.
 def irish_grid_square_to_lat_lon(ref):
+    """Get a lat/lon point for the centre of an Irish Grid square."""
+
     # Convert the letters into multipliers for the 100km squares
     offset_100km_multiplier = ord(ref[0]) - 65
 
```
```diff
@@ -229,8 +240,9 @@ def irish_grid_square_to_lat_lon(ref):
     return lat, lon
 
 
-# Get a lat/lon point for the centre of a UTM grid square (supports only squares WA & WV for the Channel Islands, nothing else implemented)
 def utm_grid_square_to_lat_lon(ref):
+    """Get a lat/lon point for the centre of a UTM grid square (supports only squares WA & WV for the Channel Islands, nothing else implemented)"""
+
     # Take the numeric parts of the grid square and multiply by 10000 to get metres from the corner of the letter-based grid square
     easting = int(ref[2]) * 10000
     northing = int(ref[3]) * 10000
```
```diff
@@ -19,37 +19,38 @@ from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODE
     HTTP_HEADERS, HAMQTH_PRG, MODE_ALIASES
 
 
-# Singleton class that provides lookup functionality.
 class LookupHelper:
+    """Singleton class that provides lookup functionality."""
 
-    # Create the lookup helper. Note that nothing actually happens until the start() method is called, and that all
-    # lookup methods will fail if start() has not yet been called. This therefore needs starting before any spot or
-    # alert handlers are created.
     def __init__(self):
-        self.CLUBLOG_CALLSIGN_DATA_CACHE = None
-        self.LOOKUP_LIB_CLUBLOG_XML = None
-        self.CLUBLOG_XML_AVAILABLE = None
-        self.LOOKUP_LIB_CLUBLOG_API = None
-        self.CLUBLOG_XML_DOWNLOAD_LOCATION = None
-        self.CLUBLOG_API_AVAILABLE = None
-        self.CLUBLOG_CTY_XML_CACHE = None
-        self.CLUBLOG_API_KEY = None
-        self.QRZ_CALLSIGN_DATA_CACHE = None
-        self.LOOKUP_LIB_QRZ = None
-        self.QRZ_AVAILABLE = None
-        self.HAMQTH_AVAILABLE = None
-        self.HAMQTH_CALLSIGN_DATA_CACHE = None
-        self.HAMQTH_BASE_URL = "https://www.hamqth.com/xml.php"
+        """Create the lookup helper. Note that nothing actually happens until the start() method is called, and that all
+        lookup methods will fail if start() has not yet been called. This therefore needs starting before any spot or
+        alert handlers are created."""
+
+        self._clublog_callsign_data_cache = None
+        self._lookup_lib_clublog_xml = None
+        self._clublog_xml_available = None
+        self._lookup_lib_clublog_api = None
+        self._clublog_xml_download_location = None
+        self._clublog_api_available = None
+        self._clublog_cty_xml_cache = None
+        self._clublog_api_key = None
+        self._qrz_callsign_data_cache = None
+        self._lookup_lib_qrz = None
+        self._qrz_available = None
+        self._hamqth_available = None
+        self._hamqth_callsign_data_cache = None
+        self._hamqth_base_url = "https://www.hamqth.com/xml.php"
         # HamQTH session keys expire after an hour. Rather than working out how much time has passed manually, we cheat
         # and cache the HTTP response for 55 minutes, so when the login URL is queried within 55 minutes of the previous
         # time, you just get the cached response.
-        self.HAMQTH_SESSION_LOOKUP_CACHE = CachedSession("cache/hamqth_session_cache",
-                                                         expire_after=timedelta(minutes=55))
-        self.CALL_INFO_BASIC = None
-        self.LOOKUP_LIB_BASIC = None
-        self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = None
-        self.DXCC_JSON_DOWNLOAD_LOCATION = None
-        self.DXCC_DATA = None
+        self._hamqth_session_lookup_cache = CachedSession("cache/hamqth_session_cache",
+                                                          expire_after=timedelta(minutes=55))
+        self._call_info_basic = None
+        self._lookup_lib_basic = None
+        self._country_files_cty_plist_download_location = None
+        self._dxcc_json_download_location = None
+        self._dxcc_data = None
 
     def start(self):
         # Lookup helpers from pyhamtools. We use five (!) of these. The simplest is country-files.com, which downloads
```
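The 55-minute trick in `__init__` works because `requests_cache.CachedSession` transparently replays a cached response until `expire_after` elapses, so the HamQTH login URL is only really fetched once per session-key lifetime. In isolation (a sketch; the httpbin URL is just a stand-in):

```python
from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession("demo_cache", expire_after=timedelta(minutes=55))
first = session.get("https://httpbin.org/uuid")   # real network request, stored in the cache
second = session.get("https://httpbin.org/uuid")  # served from the cache, same body replayed
print(second.from_cache)  # True: no second request was made
```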
```diff
@@ -57,65 +58,66 @@ class LookupHelper:
         # If the user provides login details/API keys, we also set up helpers for QRZ.com, HamQTH, Clublog (live API
         # request), and Clublog (XML download). The lookup functions iterate through these in a sensible order, looking
         # for suitable data.
-        self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = "cache/cty.plist"
-        success = self.download_country_files_cty_plist()
+        self._country_files_cty_plist_download_location = "cache/cty.plist"
+        success = self._download_country_files_cty_plist()
         if success:
-            self.LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile",
-                                              filename=self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION)
+            self._lookup_lib_basic = LookupLib(lookuptype="countryfile",
+                                               filename=self._country_files_cty_plist_download_location)
         else:
-            self.LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile")
-        self.CALL_INFO_BASIC = Callinfo(self.LOOKUP_LIB_BASIC)
+            self._lookup_lib_basic = LookupLib(lookuptype="countryfile")
+        self._call_info_basic = Callinfo(self._lookup_lib_basic)
 
-        self.QRZ_AVAILABLE = config["qrz-username"] != "" and config["qrz-password"] != ""
-        if self.QRZ_AVAILABLE:
-            self.LOOKUP_LIB_QRZ = LookupLib(lookuptype="qrz", username=config["qrz-username"],
-                                            pwd=config["qrz-password"])
-            self.QRZ_CALLSIGN_DATA_CACHE = Cache('cache/qrz_callsign_lookup_cache')
+        self._qrz_available = config["qrz-username"] != "" and config["qrz-password"] != ""
+        if self._qrz_available:
+            self._lookup_lib_qrz = LookupLib(lookuptype="qrz", username=config["qrz-username"],
+                                             pwd=config["qrz-password"])
+            self._qrz_callsign_data_cache = Cache('cache/qrz_callsign_lookup_cache')
 
-        self.HAMQTH_AVAILABLE = config["hamqth-username"] != "" and config["hamqth-password"] != ""
-        self.HAMQTH_CALLSIGN_DATA_CACHE = Cache('cache/hamqth_callsign_lookup_cache')
+        self._hamqth_available = config["hamqth-username"] != "" and config["hamqth-password"] != ""
+        self._hamqth_callsign_data_cache = Cache('cache/hamqth_callsign_lookup_cache')
 
-        self.CLUBLOG_API_KEY = config["clublog-api-key"]
-        self.CLUBLOG_CTY_XML_CACHE = CachedSession("cache/clublog_cty_xml_cache", expire_after=timedelta(days=10))
-        self.CLUBLOG_API_AVAILABLE = self.CLUBLOG_API_KEY != ""
-        self.CLUBLOG_XML_DOWNLOAD_LOCATION = "cache/cty.xml"
-        if self.CLUBLOG_API_AVAILABLE:
-            self.LOOKUP_LIB_CLUBLOG_API = LookupLib(lookuptype="clublogapi", apikey=self.CLUBLOG_API_KEY)
-            success = self.download_clublog_ctyxml()
-            self.CLUBLOG_XML_AVAILABLE = success
+        self._clublog_api_key = config["clublog-api-key"]
+        self._clublog_cty_xml_cache = CachedSession("cache/clublog_cty_xml_cache", expire_after=timedelta(days=10))
+        self._clublog_api_available = self._clublog_api_key != ""
+        self._clublog_xml_download_location = "cache/cty.xml"
+        if self._clublog_api_available:
+            self._lookup_lib_clublog_api = LookupLib(lookuptype="clublogapi", apikey=self._clublog_api_key)
+            success = self._download_clublog_ctyxml()
+            self._clublog_xml_available = success
             if success:
-                self.LOOKUP_LIB_CLUBLOG_XML = LookupLib(lookuptype="clublogxml",
-                                                        filename=self.CLUBLOG_XML_DOWNLOAD_LOCATION)
-                self.CLUBLOG_CALLSIGN_DATA_CACHE = Cache('cache/clublog_callsign_lookup_cache')
+                self._lookup_lib_clublog_xml = LookupLib(lookuptype="clublogxml",
+                                                         filename=self._clublog_xml_download_location)
+                self._clublog_callsign_data_cache = Cache('cache/clublog_callsign_lookup_cache')
 
         # We also get a lookup of DXCC data from K0SWE to use for additional lookups of e.g. flags.
-        self.DXCC_JSON_DOWNLOAD_LOCATION = "cache/dxcc.json"
-        success = self.download_dxcc_json()
+        self._dxcc_json_download_location = "cache/dxcc.json"
+        success = self._download_dxcc_json()
         if success:
-            with open(self.DXCC_JSON_DOWNLOAD_LOCATION) as f:
+            with open(self._dxcc_json_download_location) as f:
                 tmp_dxcc_data = json.load(f)["dxcc"]
             # Reformat as a map for faster lookup
-            self.DXCC_DATA = {}
+            self._dxcc_data = {}
             for dxcc in tmp_dxcc_data:
-                self.DXCC_DATA[dxcc["entityCode"]] = dxcc
+                self._dxcc_data[dxcc["entityCode"]] = dxcc
         else:
             logging.error("Could not download DXCC data, flags and similar data may be missing!")
 
         # Precompile regex matches for DXCCs to improve efficiency when iterating through them
-        for dxcc in self.DXCC_DATA.values():
+        for dxcc in (self._dxcc_data.values() if self._dxcc_data else []):
             dxcc["_prefixRegexCompiled"] = re.compile(dxcc["prefixRegex"])
 
-    # Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use
-    # this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can
-    # catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the
-    # requests_cache library to prevent re-downloading too quickly if the software keeps restarting.
-    def download_country_files_cty_plist(self):
+    def _download_country_files_cty_plist(self):
+        """Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use
+        this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can
+        catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the
+        requests_cache library to prevent re-downloading too quickly if the software keeps restarting."""
+
         try:
             logging.info("Downloading Country-files.com cty.plist...")
             response = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist",
                                                       headers=HTTP_HEADERS).text
 
-            with open(self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION, "w") as f:
+            with open(self._country_files_cty_plist_download_location, "w") as f:
                 f.write(response)
                 f.flush()
             return True
```
```diff
@@ -124,14 +126,16 @@ class LookupHelper:
             logging.error("Exception when downloading Clublog cty.xml", e)
         return False
 
-    # Download the dxcc.json file on first startup.
-    def download_dxcc_json(self):
+    def _download_dxcc_json(self):
+        """Download the dxcc.json file on first startup."""
+
         try:
             logging.info("Downloading dxcc.json...")
-            response = SEMI_STATIC_URL_DATA_CACHE.get("https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json",
-                                                      headers=HTTP_HEADERS).text
+            response = SEMI_STATIC_URL_DATA_CACHE.get(
+                "https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json",
+                headers=HTTP_HEADERS).text
 
-            with open(self.DXCC_JSON_DOWNLOAD_LOCATION, "w") as f:
+            with open(self._dxcc_json_download_location, "w") as f:
                 f.write(response)
                 f.flush()
             return True
```
```diff
@@ -140,19 +144,20 @@ class LookupHelper:
             logging.error("Exception when downloading dxcc.json", e)
         return False
 
-    # Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the
-    # database live if possible.
-    def download_clublog_ctyxml(self):
+    def _download_clublog_ctyxml(self):
+        """Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the
+        database live if possible."""
+
         try:
             logging.info("Downloading Clublog cty.xml.gz...")
-            response = self.CLUBLOG_CTY_XML_CACHE.get("https://cdn.clublog.org/cty.php?api=" + self.CLUBLOG_API_KEY,
-                                                      headers=HTTP_HEADERS)
+            response = self._clublog_cty_xml_cache.get("https://cdn.clublog.org/cty.php?api=" + self._clublog_api_key,
+                                                       headers=HTTP_HEADERS)
             logging.info("Caching Clublog cty.xml.gz...")
-            open(self.CLUBLOG_XML_DOWNLOAD_LOCATION + ".gz", 'wb').write(response.content)
-            with gzip.open(self.CLUBLOG_XML_DOWNLOAD_LOCATION + ".gz", "rb") as uncompressed:
+            open(self._clublog_xml_download_location + ".gz", 'wb').write(response.content)
+            with gzip.open(self._clublog_xml_download_location + ".gz", "rb") as uncompressed:
                 file_content = uncompressed.read()
             logging.info("Caching Clublog cty.xml...")
-            with open(self.CLUBLOG_XML_DOWNLOAD_LOCATION, "wb") as f:
+            with open(self._clublog_xml_download_location, "wb") as f:
                 f.write(file_content)
                 f.flush()
             return True
```
@@ -161,247 +166,234 @@ class LookupHelper:
|
||||
logging.error("Exception when downloading Clublog cty.xml", e)
|
||||
return False
|
||||
|
||||
# Infer a mode from the comment
|
||||
def infer_mode_from_comment(self, comment):
|
||||
for mode in ALL_MODES:
|
||||
if mode in comment.upper():
|
||||
return mode
|
||||
for mode in MODE_ALIASES.keys():
|
||||
if mode in comment.upper():
|
||||
return MODE_ALIASES[mode]
|
||||
return None
|
||||
|
||||
# Infer a "mode family" from a mode.
|
||||
def infer_mode_type_from_mode(self, mode):
|
||||
if mode.upper() in CW_MODES:
|
||||
return "CW"
|
||||
elif mode.upper() in PHONE_MODES:
|
||||
return "PHONE"
|
||||
elif mode.upper() in DATA_MODES:
|
||||
return "DATA"
|
||||
else:
|
||||
if mode.upper() != "OTHER":
|
||||
logging.warn("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
|
||||
return None
|
||||
|
||||
# Infer a band from a frequency in Hz
|
||||
def infer_band_from_freq(self, freq):
|
||||
for b in BANDS:
|
||||
if b.start_freq <= freq <= b.end_freq:
|
||||
return b
|
||||
return UNKNOWN_BAND
|
||||
|
||||
# Infer a country name from a callsign
|
||||
def infer_country_from_callsign(self, call):
|
||||
"""Infer a country name from a callsign"""
|
||||
|
||||
try:
|
||||
# Start with the basic country-files.com-based decoder.
|
||||
country = self.CALL_INFO_BASIC.get_country_name(call)
|
||||
except (KeyError, ValueError) as e:
|
||||
country = self._call_info_basic.get_country_name(call)
|
||||
except (KeyError, ValueError):
|
||||
country = None
|
||||
# Couldn't get anything from basic call info database, try QRZ.com
|
||||
if not country:
|
||||
qrz_data = self.get_qrz_data_for_callsign(call)
|
||||
qrz_data = self._get_qrz_data_for_callsign(call)
|
||||
if qrz_data and "country" in qrz_data:
|
||||
country = qrz_data["country"]
|
||||
# Couldn't get anything from QRZ.com database, try HamQTH
|
||||
if not country:
|
||||
hamqth_data = self.get_hamqth_data_for_callsign(call)
|
||||
hamqth_data = self._get_hamqth_data_for_callsign(call)
|
||||
if hamqth_data and "country" in hamqth_data:
|
||||
country = hamqth_data["country"]
|
||||
# Couldn't get anything from HamQTH database, try Clublog data
|
||||
if not country:
|
||||
clublog_data = self.get_clublog_xml_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_xml_data_for_callsign(call)
|
||||
if clublog_data and "Name" in clublog_data:
|
||||
country = clublog_data["Name"]
|
||||
if not country:
|
||||
clublog_data = self.get_clublog_api_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_api_data_for_callsign(call)
|
||||
if clublog_data and "Name" in clublog_data:
|
||||
country = clublog_data["Name"]
|
||||
# Couldn't get anything from Clublog database, try DXCC data
|
||||
if not country:
|
||||
dxcc_data = self.get_dxcc_data_for_callsign(call)
|
||||
dxcc_data = self._get_dxcc_data_for_callsign(call)
|
||||
if dxcc_data and "name" in dxcc_data:
|
||||
country = dxcc_data["name"]
|
||||
return country
|
||||
|
||||
# Infer a DXCC ID from a callsign
|
||||
def infer_dxcc_id_from_callsign(self, call):
|
||||
"""Infer a DXCC ID from a callsign"""
|
||||
|
||||
try:
|
||||
# Start with the basic country-files.com-based decoder.
|
||||
dxcc = self.CALL_INFO_BASIC.get_adif_id(call)
|
||||
except (KeyError, ValueError) as e:
|
||||
dxcc = self._call_info_basic.get_adif_id(call)
|
||||
except (KeyError, ValueError):
|
||||
dxcc = None
|
||||
# Couldn't get anything from basic call info database, try QRZ.com
|
||||
if not dxcc:
|
||||
qrz_data = self.get_qrz_data_for_callsign(call)
|
||||
qrz_data = self._get_qrz_data_for_callsign(call)
|
||||
if qrz_data and "adif" in qrz_data:
|
||||
dxcc = qrz_data["adif"]
|
||||
# Couldn't get anything from QRZ.com database, try HamQTH
|
||||
if not dxcc:
|
||||
hamqth_data = self.get_hamqth_data_for_callsign(call)
|
||||
hamqth_data = self._get_hamqth_data_for_callsign(call)
|
||||
if hamqth_data and "adif" in hamqth_data:
|
||||
dxcc = hamqth_data["adif"]
|
||||
# Couldn't get anything from HamQTH database, try Clublog data
|
||||
if not dxcc:
|
||||
clublog_data = self.get_clublog_xml_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_xml_data_for_callsign(call)
|
||||
if clublog_data and "DXCC" in clublog_data:
|
||||
dxcc = clublog_data["DXCC"]
|
||||
if not dxcc:
|
||||
clublog_data = self.get_clublog_api_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_api_data_for_callsign(call)
|
||||
if clublog_data and "DXCC" in clublog_data:
|
||||
dxcc = clublog_data["DXCC"]
|
||||
# Couldn't get anything from Clublog database, try DXCC data
|
||||
if not dxcc:
|
||||
dxcc_data = self.get_dxcc_data_for_callsign(call)
|
||||
dxcc_data = self._get_dxcc_data_for_callsign(call)
|
||||
if dxcc_data and "entityCode" in dxcc_data:
|
||||
dxcc = dxcc_data["entityCode"]
|
||||
return dxcc
|
||||
|
||||
# Infer a continent shortcode from a callsign
|
||||
def infer_continent_from_callsign(self, call):
|
||||
"""Infer a continent shortcode from a callsign"""
|
||||
|
||||
try:
|
||||
# Start with the basic country-files.com-based decoder.
|
||||
continent = self.CALL_INFO_BASIC.get_continent(call)
|
||||
except (KeyError, ValueError) as e:
|
||||
continent = self._call_info_basic.get_continent(call)
|
||||
except (KeyError, ValueError):
|
||||
continent = None
|
||||
# Couldn't get anything from basic call info database, try HamQTH
|
||||
if not continent:
|
||||
hamqth_data = self.get_hamqth_data_for_callsign(call)
|
||||
hamqth_data = self._get_hamqth_data_for_callsign(call)
|
||||
if hamqth_data and "continent" in hamqth_data:
|
||||
country = hamqth_data["continent"]
|
||||
continent = hamqth_data["continent"]
|
||||
# Couldn't get anything from HamQTH database, try Clublog data
|
||||
if not continent:
|
||||
clublog_data = self.get_clublog_xml_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_xml_data_for_callsign(call)
|
||||
if clublog_data and "Continent" in clublog_data:
|
||||
continent = clublog_data["Continent"]
|
||||
if not continent:
|
||||
clublog_data = self.get_clublog_api_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_api_data_for_callsign(call)
|
||||
if clublog_data and "Continent" in clublog_data:
|
||||
continent = clublog_data["Continent"]
|
||||
# Couldn't get anything from Clublog database, try DXCC data
|
||||
if not continent:
|
||||
dxcc_data = self.get_dxcc_data_for_callsign(call)
|
||||
dxcc_data = self._get_dxcc_data_for_callsign(call)
|
||||
# Some DXCCs are in two continents, if so don't use the continent data as we can't be sure
|
||||
if dxcc_data and "continent" in dxcc_data and len(dxcc_data["continent"]) == 1:
|
||||
continent = dxcc_data["continent"][0]
|
||||
return continent
|
||||
|
||||
# Infer a CQ zone from a callsign
|
||||
def infer_cq_zone_from_callsign(self, call):
|
||||
"""Infer a CQ zone from a callsign"""
|
||||
|
||||
try:
|
||||
# Start with the basic country-files.com-based decoder.
|
||||
cqz = self.CALL_INFO_BASIC.get_cqz(call)
|
||||
except (KeyError, ValueError) as e:
|
||||
cqz = self._call_info_basic.get_cqz(call)
|
||||
except (KeyError, ValueError):
|
||||
cqz = None
|
||||
# Couldn't get anything from basic call info database, try QRZ.com
|
||||
if not cqz:
|
||||
qrz_data = self.get_qrz_data_for_callsign(call)
|
||||
qrz_data = self._get_qrz_data_for_callsign(call)
|
||||
if qrz_data and "cqz" in qrz_data:
|
||||
cqz = qrz_data["cqz"]
|
||||
# Couldn't get anything from QRZ.com database, try HamQTH
|
||||
if not cqz:
|
||||
hamqth_data = self.get_hamqth_data_for_callsign(call)
|
||||
hamqth_data = self._get_hamqth_data_for_callsign(call)
|
||||
if hamqth_data and "cq" in hamqth_data:
|
||||
cqz = hamqth_data["cq"]
|
||||
# Couldn't get anything from HamQTH database, try Clublog data
|
||||
if not cqz:
|
||||
clublog_data = self.get_clublog_xml_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_xml_data_for_callsign(call)
|
||||
if clublog_data and "CQZ" in clublog_data:
|
||||
cqz = clublog_data["CQZ"]
|
||||
if not cqz:
|
||||
clublog_data = self.get_clublog_api_data_for_callsign(call)
|
||||
clublog_data = self._get_clublog_api_data_for_callsign(call)
|
||||
if clublog_data and "CQZ" in clublog_data:
|
||||
cqz = clublog_data["CQZ"]
|
||||
# Couldn't get anything from Clublog database, try DXCC data
|
||||
if not cqz:
|
||||
dxcc_data = self.get_dxcc_data_for_callsign(call)
|
||||
dxcc_data = self._get_dxcc_data_for_callsign(call)
|
||||
# Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure
|
||||
if dxcc_data and "cq" in dxcc_data and len(dxcc_data["cq"]) == 1:
|
||||
cqz = dxcc_data["cq"][0]
|
||||
return cqz

# Infer a ITU zone from a callsign
def infer_itu_zone_from_callsign(self, call):
"""Infer a ITU zone from a callsign"""

try:
# Start with the basic country-files.com-based decoder.
ituz = self.CALL_INFO_BASIC.get_ituz(call)
except (KeyError, ValueError) as e:
ituz = self._call_info_basic.get_ituz(call)
except (KeyError, ValueError):
ituz = None
# Couldn't get anything from basic call info database, try QRZ.com
if not ituz:
qrz_data = self.get_qrz_data_for_callsign(call)
qrz_data = self._get_qrz_data_for_callsign(call)
if qrz_data and "ituz" in qrz_data:
ituz = qrz_data["ituz"]
# Couldn't get anything from QRZ.com database, try HamQTH
if not ituz:
hamqth_data = self.get_hamqth_data_for_callsign(call)
hamqth_data = self._get_hamqth_data_for_callsign(call)
if hamqth_data and "itu" in hamqth_data:
ituz = hamqth_data["itu"]
# Couldn't get anything from HamQTH database, Clublog doesn't provide this, so try DXCC data
if not ituz:
dxcc_data = self.get_dxcc_data_for_callsign(call)
dxcc_data = self._get_dxcc_data_for_callsign(call)
# Some DXCCs are in multiple zones, if so don't use the zone data as we can't be sure
if dxcc_data and "itu" in dxcc_data and len(dxcc_data["itu"]) == 1:
ituz = dxcc_data["itu"]
return ituz

# Get an emoji flag for a given DXCC entity ID
def get_flag_for_dxcc(self, dxcc):
return self.DXCC_DATA[dxcc]["flag"] if dxcc in self.DXCC_DATA else None
"""Get an emoji flag for a given DXCC entity ID"""

return self._dxcc_data[dxcc]["flag"] if dxcc in self._dxcc_data else None

# Infer an operator name from a callsign (requires QRZ.com/HamQTH)
def infer_name_from_callsign_online_lookup(self, call):
data = self.get_qrz_data_for_callsign(call)
"""Infer an operator name from a callsign (requires QRZ.com/HamQTH)"""

data = self._get_qrz_data_for_callsign(call)
if data and "fname" in data:
name = data["fname"]
if "name" in data:
name = name + " " + data["name"]
return name
data = self.get_hamqth_data_for_callsign(call)
data = self._get_hamqth_data_for_callsign(call)
if data and "nick" in data:
return data["nick"]
else:
return None

# Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH)
# Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage)
def infer_latlon_from_callsign_online_lookup(self, call):
data = self.get_qrz_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and (float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(data["latitude"]) < 89.9:
"""Infer a latitude and longitude from a callsign (requires QRZ.com/HamQTH)
Coordinates that look default are rejected (apologies if your position really is 0,0, enjoy your voyage)"""

data = self._get_qrz_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and (
float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(
data["latitude"]) < 89.9:
return [float(data["latitude"]), float(data["longitude"])]
data = self.get_hamqth_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and (float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(data["latitude"]) < 89.9:
data = self._get_hamqth_data_for_callsign(call)
if data and "latitude" in data and "longitude" in data and (
float(data["latitude"]) != 0 or float(data["longitude"]) != 0) and -89.9 < float(
data["latitude"]) < 89.9:
return [float(data["latitude"]), float(data["longitude"])]
else:
return None

# Infer a grid locator from a callsign (requires QRZ.com/HamQTH).
# Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research)
def infer_grid_from_callsign_online_lookup(self, call):
data = self.get_qrz_data_for_callsign(call)
if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and data["locator"].upper() != "AA00AA00":
"""Infer a grid locator from a callsign (requires QRZ.com/HamQTH).
Grids that look default are rejected (apologies if your grid really is AA00aa, enjoy your research)"""

data = self._get_qrz_data_for_callsign(call)
if data and "locator" in data and data["locator"].upper() != "AA00" and data["locator"].upper() != "AA00AA" and \
data["locator"].upper() != "AA00AA00":
return data["locator"]
data = self.get_hamqth_data_for_callsign(call)
if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data["grid"].upper() != "AA00AA00":
data = self._get_hamqth_data_for_callsign(call)
if data and "grid" in data and data["grid"].upper() != "AA00" and data["grid"].upper() != "AA00AA" and data[
"grid"].upper() != "AA00AA00":
return data["grid"]
else:
return None

# Infer a textual QTH from a callsign (requires QRZ.com/HamQTH)
def infer_qth_from_callsign_online_lookup(self, call):
data = self.get_qrz_data_for_callsign(call)
"""Infer a textual QTH from a callsign (requires QRZ.com/HamQTH)"""

data = self._get_qrz_data_for_callsign(call)
if data and "addr2" in data:
return data["addr2"]
data = self.get_hamqth_data_for_callsign(call)
data = self._get_hamqth_data_for_callsign(call)
if data and "qth" in data:
return data["qth"]
else:
return None

# Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate)
def infer_latlon_from_callsign_dxcc(self, call):
"""Infer a latitude and longitude from a callsign (using DXCC, probably very inaccurate)"""

try:
data = self.CALL_INFO_BASIC.get_lat_long(call)
data = self._call_info_basic.get_lat_long(call)
if data and "latitude" in data and "longitude" in data:
loc = [float(data["latitude"]), float(data["longitude"])]
else:
@@ -410,17 +402,18 @@ class LookupHelper:
loc = None
# Couldn't get anything from basic call info database, try Clublog data
if not loc:
data = self.get_clublog_xml_data_for_callsign(call)
data = self._get_clublog_xml_data_for_callsign(call)
if data and "Lat" in data and "Lon" in data:
loc = [float(data["Lat"]), float(data["Lon"])]
if not loc:
data = self.get_clublog_api_data_for_callsign(call)
data = self._get_clublog_api_data_for_callsign(call)
if data and "Lat" in data and "Lon" in data:
loc = [float(data["Lat"]), float(data["Lon"])]
return loc

# Infer a grid locator from a callsign (using DXCC, probably very inaccurate)
def infer_grid_from_callsign_dxcc(self, call):
"""Infer a grid locator from a callsign (using DXCC, probably very inaccurate)"""

latlon = self.infer_latlon_from_callsign_dxcc(call)
grid = None
try:
@@ -429,47 +422,28 @@ class LookupHelper:
logging.debug("Invalid lat/lon received for DXCC")
return grid

# Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really.
def infer_mode_from_frequency(self, freq):
try:
khz = freq / 1000.0
mode = freq_to_band(khz)["mode"]
# Some additional common digimode ranges in addition to what the 3rd-party freq_to_band function returns.
# This is mostly here just because freq_to_band is very specific about things like FT8 frequencies, and e.g.
# a spot at 7074.5 kHz will be indicated as LSB, even though it's clearly in the FT8 range. Future updates
# might include other common digimode centres of activity here, but this achieves the main goal of keeping
# large numbers of clearly-FT* spots off the list of people filtering out digimodes.
if (7074 <= khz < 7077) or (10136 <= khz < 10139) or (14074 <= khz < 14077) or (18100 <= khz < 18103) or (
21074 <= khz < 21077) or (24915 <= khz < 24918) or (28074 <= khz < 28077):
mode = "FT8"
if (7047.5 <= khz < 7050.5) or (10140 <= khz < 10143) or (14080 <= khz < 14083) or (
18104 <= khz < 18107) or (21140 <= khz < 21143) or (24919 <= khz < 24922) or (28180 <= khz < 28183):
mode = "FT4"
return mode
except KeyError:
return None
def _get_qrz_data_for_callsign(self, call):
"""Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it"""

# Utility method to get QRZ.com data from cache if possible, if not get it from the API and cache it
def get_qrz_data_for_callsign(self, call):
# Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.QRZ_CALLSIGN_DATA_CACHE:
return self.QRZ_CALLSIGN_DATA_CACHE.get(call)
elif self.QRZ_AVAILABLE:
if call in self._qrz_callsign_data_cache:
return self._qrz_callsign_data_cache.get(call)
elif self._qrz_available:
try:
data = self.LOOKUP_LIB_QRZ.lookup_callsign(callsign=call)
self.QRZ_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
data = self._lookup_lib_qrz.lookup_callsign(callsign=call)
self._qrz_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# QRZ had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try:
data = self.LOOKUP_LIB_QRZ.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self.QRZ_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
data = self._lookup_lib_qrz.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self._qrz_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# QRZ had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.QRZ_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds
self._qrz_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None
except (Exception):
except Exception:
# General exception like a timeout when communicating with QRZ. Return None this time, but don't cache
# that, so we can try again next time.
logging.error("Exception when looking up QRZ data")
@@ -477,16 +451,17 @@ class LookupHelper:
else:
return None
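
The cache-or-fetch pattern used by this and the following lookup methods is worth seeing in isolation. A sketch assuming the caches are diskcache.Cache instances, which matches the `in`/`get`/`add(..., expire=...)` usage above; the cache path and the fetch function are illustrative:

```python
import diskcache

cache = diskcache.Cache("/tmp/callsign-cache")  # hypothetical location

def cached_lookup(call, fetch):
    if call in cache:
        return cache.get(call)  # may be a cached None, i.e. a remembered miss
    try:
        data = fetch(call)
    except (KeyError, ValueError):
        data = None  # cache the miss so we don't ask the API again for a week
    cache.add(call, data, expire=604800)  # 1 week in seconds
    return data
```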

# Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it
def get_hamqth_data_for_callsign(self, call):
def _get_hamqth_data_for_callsign(self, call):
"""Utility method to get HamQTH data from cache if possible, if not get it from the API and cache it"""

# Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.HAMQTH_CALLSIGN_DATA_CACHE:
return self.HAMQTH_CALLSIGN_DATA_CACHE.get(call)
elif self.HAMQTH_AVAILABLE:
if call in self._hamqth_callsign_data_cache:
return self._hamqth_callsign_data_cache.get(call)
elif self._hamqth_available:
try:
# First we need to log in and get a session token.
session_data = self.HAMQTH_SESSION_LOOKUP_CACHE.get(
self.HAMQTH_BASE_URL + "?u=" + urllib.parse.quote_plus(config["hamqth-username"]) +
session_data = self._hamqth_session_lookup_cache.get(
self._hamqth_base_url + "?u=" + urllib.parse.quote_plus(config["hamqth-username"]) +
"&p=" + urllib.parse.quote_plus(config["hamqth-password"]), headers=HTTP_HEADERS).content
dict_data = xmltodict.parse(session_data)
if "session_id" in dict_data["HamQTH"]["session"]:
@@ -495,83 +470,146 @@ class LookupHelper:
# Now look up the actual data.
try:
lookup_data = SEMI_STATIC_URL_DATA_CACHE.get(
self.HAMQTH_BASE_URL + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
self._hamqth_base_url + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
call) + "&prg=" + HAMQTH_PRG, headers=HTTP_HEADERS).content
data = xmltodict.parse(lookup_data)["HamQTH"]["search"]
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
self._hamqth_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# HamQTH had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try:
lookup_data = SEMI_STATIC_URL_DATA_CACHE.get(
self.HAMQTH_BASE_URL + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG, headers=HTTP_HEADERS).content
self._hamqth_base_url + "?id=" + session_id + "&callsign=" + urllib.parse.quote_plus(
callinfo.Callinfo.get_homecall(call)) + "&prg=" + HAMQTH_PRG,
headers=HTTP_HEADERS).content
data = xmltodict.parse(lookup_data)["HamQTH"]["search"]
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
self._hamqth_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# HamQTH had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.HAMQTH_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds
self._hamqth_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None

else:
logging.warn("HamQTH login details incorrect, failed to look up with HamQTH.")
logging.warning("HamQTH login details incorrect, failed to look up with HamQTH.")
except:
logging.error("Exception when looking up HamQTH data")
return None
return None

def _get_clublog_api_data_for_callsign(self, call):
"""Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it"""

# Utility method to get Clublog API data from cache if possible, if not get it from the API and cache it
def get_clublog_api_data_for_callsign(self, call):
# Fetch from cache if we can, otherwise fetch from the API and cache it
if call in self.CLUBLOG_CALLSIGN_DATA_CACHE:
return self.CLUBLOG_CALLSIGN_DATA_CACHE.get(call)
elif self.CLUBLOG_API_AVAILABLE:
if call in self._clublog_callsign_data_cache:
return self._clublog_callsign_data_cache.get(call)
elif self._clublog_api_available:
try:
data = self.LOOKUP_LIB_CLUBLOG_API.lookup_callsign(callsign=call)
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
data = self._lookup_lib_clublog_api.lookup_callsign(callsign=call)
self._clublog_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# Clublog had no info for the call, but maybe it had prefixes or suffixes. Try again with the base call.
try:
data = self.LOOKUP_LIB_CLUBLOG_API.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, data, expire=604800) # 1 week in seconds
data = self._lookup_lib_clublog_api.lookup_callsign(callsign=callinfo.Callinfo.get_homecall(call))
self._clublog_callsign_data_cache.add(call, data, expire=604800) # 1 week in seconds
return data
except (KeyError, ValueError):
# Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds
self._clublog_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None
except APIKeyMissingError:
# User API key was wrong, warn
logging.error("Could not look up via Clublog API, key " + self.CLUBLOG_API_KEY + " was rejected.")
logging.error("Could not look up via Clublog API, key " + self._clublog_api_key + " was rejected.")
return None
else:
return None

# Utility method to get Clublog XML data from file
def get_clublog_xml_data_for_callsign(self, call):
if self.CLUBLOG_XML_AVAILABLE:
def _get_clublog_xml_data_for_callsign(self, call):
"""Utility method to get Clublog XML data from file"""

if self._clublog_xml_available:
try:
data = self.LOOKUP_LIB_CLUBLOG_XML.lookup_callsign(callsign=call)
data = self._lookup_lib_clublog_xml.lookup_callsign(callsign=call)
return data
except (KeyError, ValueError):
# Clublog had no info for the call, that's OK. Cache a None so we don't try to look this up again
self.CLUBLOG_CALLSIGN_DATA_CACHE.add(call, None, expire=604800) # 1 week in seconds
self._clublog_callsign_data_cache.add(call, None, expire=604800) # 1 week in seconds
return None
else:
return None

# Utility method to get generic DXCC data from our lookup table, if we can find it
def get_dxcc_data_for_callsign(self, call):
for entry in self.DXCC_DATA.values():
def _get_dxcc_data_for_callsign(self, call):
"""Utility method to get generic DXCC data from our lookup table, if we can find it"""

for entry in self._dxcc_data.values():
if entry["_prefixRegexCompiled"].match(call):
return entry
return None
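
A hypothetical sketch of the precompiled-prefix matching this method relies on: each DXCC entry carries a prefix regex that is compiled once when the table is loaded, and callsigns are then matched against each entry in turn. The table entry below is illustrative only, not Spothole's real data:

```python
import re

dxcc_data = {
    223: {"name": "England", "prefix_regex": r"^(G|M|2E)"},  # made-up table entry
}
for entry in dxcc_data.values():
    entry["_prefixRegexCompiled"] = re.compile(entry["prefix_regex"])

def get_dxcc_entry(call):
    for entry in dxcc_data.values():
        if entry["_prefixRegexCompiled"].match(call):
            return entry
    return None

print(get_dxcc_entry("M0ABC")["name"])  # England
```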

# Shutdown method to close down any caches neatly.
def stop(self):
self.QRZ_CALLSIGN_DATA_CACHE.close()
self.CLUBLOG_CALLSIGN_DATA_CACHE.close()
"""Shutdown method to close down any caches neatly."""

self._qrz_callsign_data_cache.close()
self._clublog_callsign_data_cache.close()


# Singleton object
lookup_helper = LookupHelper()

def infer_mode_from_comment(comment):
"""Infer a mode from the comment"""

for mode in ALL_MODES:
if mode in comment.upper():
return mode
for mode in MODE_ALIASES.keys():
if mode in comment.upper():
return MODE_ALIASES[mode]
return None


def infer_mode_type_from_mode(mode):
"""Infer a "mode family" from a mode."""

if mode.upper() in CW_MODES:
return "CW"
elif mode.upper() in PHONE_MODES:
return "PHONE"
elif mode.upper() in DATA_MODES:
return "DATA"
else:
if mode.upper() != "OTHER":
logging.warning("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
return None


def infer_band_from_freq(freq):
"""Infer a band from a frequency in Hz"""

for b in BANDS:
if b.start_freq <= freq <= b.end_freq:
return b
return UNKNOWN_BAND
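
A self-contained sketch of the band lookup above, with a made-up two-entry band table (real band edges vary by region); frequencies are in Hz, as the docstring states:

```python
from dataclasses import dataclass

@dataclass
class Band:
    name: str
    start_freq: float  # Hz
    end_freq: float    # Hz

BANDS = [Band("40m", 7_000_000, 7_200_000), Band("20m", 14_000_000, 14_350_000)]
UNKNOWN_BAND = Band("Unknown", 0, 0)

def infer_band(freq):
    for b in BANDS:
        if b.start_freq <= freq <= b.end_freq:
            return b
    return UNKNOWN_BAND

print(infer_band(7_074_000).name)  # 40m
```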


def infer_mode_from_frequency(freq):
"""Infer a mode from the frequency (in Hz) according to the band plan. Just a guess really."""

try:
khz = freq / 1000.0
mode = freq_to_band(khz)["mode"]
# Some additional common digimode ranges in addition to what the 3rd-party freq_to_band function returns.
# This is mostly here just because freq_to_band is very specific about things like FT8 frequencies, and e.g.
# a spot at 7074.5 kHz will be indicated as LSB, even though it's clearly in the FT8 range. Future updates
# might include other common digimode centres of activity here, but this achieves the main goal of keeping
# large numbers of clearly-FT* spots off the list of people filtering out digimodes.
if (7074 <= khz < 7077) or (10136 <= khz < 10139) or (14074 <= khz < 14077) or (18100 <= khz < 18103) or (
21074 <= khz < 21077) or (24915 <= khz < 24918) or (28074 <= khz < 28077):
mode = "FT8"
if (7047.5 <= khz < 7050.5) or (10140 <= khz < 10143) or (14080 <= khz < 14083) or (
18104 <= khz < 18107) or (21140 <= khz < 21143) or (24919 <= khz < 24922) or (28180 <= khz < 28183):
mode = "FT4"
return mode
except KeyError:
return None
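
A quick illustration of the FT8 override windows above: 7074.5 kHz sits inside the 40m FT8 slot, so a spot there is reported as FT8 even where a plain band-plan lookup would say LSB:

```python
khz = 7074500 / 1000.0     # frequency arrives in Hz, as elsewhere in the codebase
print(7074 <= khz < 7077)  # True -> mode becomes "FT8"
```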

@@ -31,6 +31,7 @@ memory_use_gauge = Gauge(
)


# Get a Prometheus metrics response for the web server
def get_metrics():
"""Get a Prometheus metrics response for the web server"""

return generate_latest(registry)
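
A minimal sketch of the prometheus_client pieces in play here, assuming a dedicated CollectorRegistry like the `registry` used above; the metric name and value are illustrative:

```python
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()
spots_gauge = Gauge("spothole_spots", "Number of spots held in memory", registry=registry)

spots_gauge.set(42)
print(generate_latest(registry).decode())  # Prometheus text exposition format
```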

@@ -8,18 +8,20 @@ from core.constants import SIGS, HTTP_HEADERS
from core.geo_utils import wab_wai_square_to_lat_lon


# Utility function to get the regex string for a SIG reference for a named SIG. If no match is found, None will be returned.
def get_ref_regex_for_sig(sig):
"""Utility function to get the regex string for a SIG reference for a named SIG. If no match is found, None will be returned."""

for s in SIGS:
if s.name.upper() == sig.upper():
return s.ref_regex
return None
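
Usage sketch: matching a reference against the POTA-style pattern r"[A-Z]{2}\-\d+" quoted in the SIG definitions later in this diff:

```python
import re

pota_regex = r"[A-Z]{2}\-\d+"
print(bool(re.fullmatch(pota_regex, "GB-0001")))  # True
print(bool(re.fullmatch(pota_regex, "GB0001")))   # False
```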


# Look up details of a SIG reference (e.g. POTA park) such as name, lat/lon, and grid. Takes in a sig_ref object which
# must at minimum have a "sig" and an "id". The rest of the object will be populated and returned.
# Note there is currently no support for KRMNPA location lookup, see issue #61.
def populate_sig_ref_info(sig_ref):
"""Look up details of a SIG reference (e.g. POTA park) such as name, lat/lon, and grid. Takes in a sig_ref object which
must at minimum have a "sig" and an "id". The rest of the object will be populated and returned.
Note there is currently no support for KRMNPA location lookup, see issue #61."""

if sig_ref.sig is None or sig_ref.id is None:
logging.warning("Failed to look up sig_ref info, sig or id were not set.")

@@ -67,7 +69,7 @@ def populate_sig_ref_info(sig_ref):
sig_ref.longitude = data["longitude"] if "longitude" in data else None
elif sig.upper() == "WWFF":
wwff_csv_data = SEMI_STATIC_URL_DATA_CACHE.get("https://wwff.co/wwff-data/wwff_directory.csv",
headers=HTTP_HEADERS)
headers=HTTP_HEADERS)
wwff_dr = csv.DictReader(wwff_csv_data.content.decode().splitlines())
for row in wwff_dr:
if row["reference"] == ref_id:
@@ -75,7 +77,8 @@ def populate_sig_ref_info(sig_ref):
sig_ref.url = "https://wwff.co/directory/?showRef=" + ref_id
sig_ref.grid = row["iaruLocator"] if "iaruLocator" in row and row["iaruLocator"] != "-" else None
sig_ref.latitude = float(row["latitude"]) if "latitude" in row and row["latitude"] != "-" else None
sig_ref.longitude = float(row["longitude"]) if "longitude" in row and row["longitude"] != "-" else None
sig_ref.longitude = float(row["longitude"]) if "longitude" in row and row[
"longitude"] != "-" else None
break
elif sig.upper() == "SIOTA":
siota_csv_data = SEMI_STATIC_URL_DATA_CACHE.get("https://www.silosontheair.com/data/silos.csv",
@@ -124,7 +127,8 @@ def populate_sig_ref_info(sig_ref):
sig_ref.name = sig_ref.id
sig_ref.url = "https://www.beachesontheair.com/beaches/" + sig_ref.name.lower().replace(" ", "-")
elif sig.upper() == "LLOTA":
data = SEMI_STATIC_URL_DATA_CACHE.get("https://llota.app/api/public/references", headers=HTTP_HEADERS).json()
data = SEMI_STATIC_URL_DATA_CACHE.get("https://llota.app/api/public/references",
headers=HTTP_HEADERS).json()
if data:
for ref in data:
if ref["reference_code"] == ref_id:

@@ -10,75 +10,82 @@ from core.constants import SOFTWARE_VERSION
from core.prometheus_metrics_handler import memory_use_gauge, spots_gauge, alerts_gauge


# Provides a timed update of the application's status data.
class StatusReporter:
"""Provides a timed update of the application's status data."""

# Constructor
def __init__(self, status_data, run_interval, web_server, cleanup_timer, spots, spot_providers, alerts,
alert_providers):
self.status_data = status_data
self.run_interval = run_interval
self.web_server = web_server
self.cleanup_timer = cleanup_timer
self.spots = spots
self.spot_providers = spot_providers
self.alerts = alerts
self.alert_providers = alert_providers
"""Constructor"""

self._status_data = status_data
self._run_interval = run_interval
self._web_server = web_server
self._cleanup_timer = cleanup_timer
self._spots = spots
self._spot_providers = spot_providers
self._alerts = alerts
self._alert_providers = alert_providers
self._thread = None
self._stop_event = Event()
self.startup_time = datetime.now(pytz.UTC)
self._startup_time = datetime.now(pytz.UTC)

self.status_data["software-version"] = SOFTWARE_VERSION
self.status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN
self._status_data["software-version"] = SOFTWARE_VERSION
self._status_data["server-owner-callsign"] = SERVER_OWNER_CALLSIGN

# Start the reporter thread
def start(self):
"""Start the reporter thread"""

self._thread = Thread(target=self._run, daemon=True)
self._thread.start()

# Stop any threads and prepare for application shutdown
def stop(self):
"""Stop any threads and prepare for application shutdown"""

self._stop_event.set()

# Thread entry point: report immediately on startup, then on each interval until stopped
def _run(self):
"""Thread entry point: report immediately on startup, then on each interval until stopped"""

while True:
self._report()
if self._stop_event.wait(timeout=self.run_interval):
if self._stop_event.wait(timeout=self._run_interval):
break

# Write status information
def _report(self):
self.status_data["uptime"] = (datetime.now(pytz.UTC) - self.startup_time).total_seconds()
self.status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3)
self.status_data["num_spots"] = len(self.spots)
self.status_data["num_alerts"] = len(self.alerts)
self.status_data["spot_providers"] = list(
"""Write status information"""

self._status_data["uptime"] = (datetime.now(pytz.UTC) - self._startup_time).total_seconds()
self._status_data["mem_use_mb"] = round(psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024), 3)
self._status_data["num_spots"] = len(self._spots)
self._status_data["num_alerts"] = len(self._alerts)
self._status_data["spot_providers"] = list(
map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status,
"last_updated": p.last_update_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0,
"last_spot": p.last_spot_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0}, self.spot_providers))
self.status_data["alert_providers"] = list(
tzinfo=pytz.UTC).timestamp() if p.last_spot_time.year > 2000 else 0},
self._spot_providers))
self._status_data["alert_providers"] = list(
map(lambda p: {"name": p.name, "enabled": p.enabled, "status": p.status,
"last_updated": p.last_update_time.replace(
tzinfo=pytz.UTC).timestamp() if p.last_update_time.year > 2000 else 0},
self.alert_providers))
self.status_data["cleanup"] = {"status": self.cleanup_timer.status,
"last_ran": self.cleanup_timer.last_cleanup_time.replace(
tzinfo=pytz.UTC).timestamp() if self.cleanup_timer.last_cleanup_time else 0}
self.status_data["webserver"] = {"status": self.web_server.web_server_metrics["status"],
"last_api_access": self.web_server.web_server_metrics[
"last_api_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self.web_server.web_server_metrics[
"last_api_access_time"] else 0,
"api_access_count": self.web_server.web_server_metrics["api_access_counter"],
"last_page_access": self.web_server.web_server_metrics[
"last_page_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self.web_server.web_server_metrics[
"last_page_access_time"] else 0,
"page_access_count": self.web_server.web_server_metrics["page_access_counter"]}
self._alert_providers))
self._status_data["cleanup"] = {"status": self._cleanup_timer.status,
"last_ran": self._cleanup_timer.last_cleanup_time.replace(
tzinfo=pytz.UTC).timestamp() if self._cleanup_timer.last_cleanup_time else 0}
self._status_data["webserver"] = {"status": self._web_server.web_server_metrics["status"],
"last_api_access": self._web_server.web_server_metrics[
"last_api_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self._web_server.web_server_metrics[
"last_api_access_time"] else 0,
"api_access_count": self._web_server.web_server_metrics["api_access_counter"],
"last_page_access": self._web_server.web_server_metrics[
"last_page_access_time"].replace(
tzinfo=pytz.UTC).timestamp() if self._web_server.web_server_metrics[
"last_page_access_time"] else 0,
"page_access_count": self._web_server.web_server_metrics["page_access_counter"]}

# Update Prometheus metrics
memory_use_gauge.set(psutil.Process(os.getpid()).memory_info().rss * 1024)
spots_gauge.set(len(self.spots))
alerts_gauge.set(len(self.alerts))
memory_use_gauge.set(psutil.Process(os.getpid()).memory_info().rss)
spots_gauge.set(len(self._spots))
alerts_gauge.set(len(self._alerts))
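
The timer idiom StatusReporter adopts here is neat: Event.wait(timeout) serves as both the sleep between reports and the shutdown signal, so stop() takes effect immediately rather than after the interval expires. A standalone sketch, with the interval and loop body as illustrative stand-ins:

```python
from threading import Event, Thread

stop_event = Event()

def run(interval=10):
    while True:
        print("report")                        # stand-in for _report()
        if stop_event.wait(timeout=interval):  # True means set() was called
            break

Thread(target=run, daemon=True).start()
stop_event.set()  # the equivalent of StatusReporter.stop()
```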

@@ -1,14 +1,15 @@
# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things.
# Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need
# to receive spots without complex handling.
def serialize_everything(obj):
"""Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things.
Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need
to receive spots without complex handling."""
return obj.__dict__
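
Usage sketch: handing serialize_everything to json.dumps as its `default` means any object json can't serialise natively falls back to its __dict__ (the class here is hypothetical):

```python
import json

class Thing:
    def __init__(self):
        self.a = 1

def serialize_everything(obj):
    return obj.__dict__

print(json.dumps({"thing": Thing()}, default=serialize_everything))  # {"thing": {"a": 1}}
```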


# Empty a queue
def empty_queue(q):
"""Empty a queue"""

while not q.empty():
try:
q.get_nowait()
except:
break
break

@@ -10,9 +10,10 @@ from core.lookup_helper import lookup_helper
from core.sig_utils import populate_sig_ref_info


# Data class that defines an alert.
@dataclass
class Alert:
"""Data class that defines an alert."""

# Unique identifier for the alert
id: str = None
# Callsigns of the operators that has been alerted
@@ -60,8 +61,9 @@ class Alert:
# The ID the source gave it, if any.
source_id: str = None

# Infer missing parameters where possible
def infer_missing(self):
"""Infer missing parameters where possible"""

# If we somehow don't have a start time, set it to zero so it sorts off the bottom of any list but
# clients can still reliably parse it as a number.
if not self.start_time:
@@ -103,7 +105,7 @@ class Alert:

# If the spot itself doesn't have a SIG yet, but we have at least one SIG reference, take that reference's SIG
# and apply it to the whole spot.
if self.sig_refs and len(self.sig_refs) > 0 and not self.sig:
if self.sig_refs and len(self.sig_refs) > 0 and self.sig_refs[0] and not self.sig:
self.sig = self.sig_refs[0].sig

# DX operator details lookup, using QRZ.com. This should be the last resort compared to taking the data from
@@ -122,14 +124,16 @@ class Alert:
self_copy.received_time_iso = ""
self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest()

# JSON serialise
def to_json(self):
"""JSON serialise"""

return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)

# Decide if this alert has expired (in which case it should not be added to the system in the first place, and not
# returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
# either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours
# ago. If it somehow doesn't have a start_time either, it is considered to be expired.
def expired(self):
"""Decide if this alert has expired (in which case it should not be added to the system in the first place, and not
returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
either having an end_time in the past, or if it only has a start_time, then that start time was more than 3 hours
ago. If it somehow doesn't have a start_time either, it is considered to be expired."""

return not self.start_time or (self.end_time and self.end_time < datetime.now(pytz.UTC).timestamp()) or (
not self.end_time and self.start_time < (datetime.now(pytz.UTC) - timedelta(hours=3)).timestamp())
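
A worked sketch of the expiry rule with plain UNIX timestamps (values illustrative): no start_time means expired, a past end_time means expired, and a start-only alert survives for three hours:

```python
from datetime import datetime
import pytz

now = datetime.now(pytz.UTC).timestamp()

def alert_expired(start_time, end_time):
    return not start_time or (end_time and end_time < now) or (
            not end_time and start_time < now - 3 * 3600)

print(alert_expired(None, None))            # True: no start time at all
print(alert_expired(now - 4 * 3600, None))  # True: start-only, over 3 hours old
print(alert_expired(now - 600, now + 600))  # False: still running
```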

@@ -1,11 +1,13 @@
from dataclasses import dataclass

# Data class that defines a band.

@dataclass
class Band:
"""Data class that defines a band."""

# Band name
name: str
# Start frequency, in Hz
start_freq: float
# Stop frequency, in Hz
end_freq: float
end_freq: float

@@ -1,11 +1,13 @@
from dataclasses import dataclass

# Data class that defines a Special Interest Group.

@dataclass
class SIG:
"""Data class that defines a Special Interest Group."""

# SIG name, e.g. "POTA"
name: str
# Description, e.g. "Parks on the Air"
description: str
# Regex matcher for references, e.g. for POTA r"[A-Z]{2}\-\d+".
ref_regex: str = None
ref_regex: str = None

@@ -1,9 +1,11 @@
from dataclasses import dataclass

# Data class that defines a Special Interest Group "info" or reference. As well as the basic reference ID we include a
# name and a lookup URL.

@dataclass
class SIGRef:
"""Data class that defines a Special Interest Group "info" or reference. As well as the basic reference ID we include a
name and a lookup URL."""

# Reference ID, e.g. "GB-0001".
id: str
# SIG that this reference is in, e.g. "POTA".
@@ -19,4 +21,4 @@ class SIGRef:
# Maidenhead grid reference of the reference, if known.
grid: str = None
# Activation score. SOTA only
activation_score: int = None
activation_score: int = None

data/spot.py
@@ -12,14 +12,16 @@ from pyhamtools.locator import locator_to_latlong, latlong_to_locator
from core.config import MAX_SPOT_AGE
from core.constants import MODE_ALIASES
from core.geo_utils import lat_lon_to_cq_zone, lat_lon_to_itu_zone
from core.lookup_helper import lookup_helper
from core.lookup_helper import lookup_helper, infer_band_from_freq, infer_mode_from_comment, infer_mode_from_frequency, \
infer_mode_type_from_mode
from core.sig_utils import populate_sig_ref_info, ANY_SIG_REGEX, get_ref_regex_for_sig
from data.sig_ref import SIGRef


# Data class that defines a spot.
@dataclass
class Spot:
"""Data class that defines a spot."""

# Unique identifier for the spot
id: str = None

@@ -129,8 +131,9 @@ class Spot:
# The ID the source gave it, if any.
source_id: str = None

# Infer missing parameters where possible
def infer_missing(self):
"""Infer missing parameters where possible"""

# If we somehow don't have a spot time, set it to zero so it sorts off the bottom of any list but
# clients can still reliably parse it as a number.
if not self.time:
@@ -186,7 +189,8 @@ class Spot:

# Spotter country, continent, zones etc. from callsign.
# DE call with no digits, or APRS servers starting "T2" are not things we can look up location for
if self.de_call and any(char.isdigit() for char in self.de_call) and not (self.de_call.startswith("T2") and self.source == "APRS-IS"):
if self.de_call and any(char.isdigit() for char in self.de_call) and not (
self.de_call.startswith("T2") and self.source == "APRS-IS"):
if not self.de_country:
self.de_country = lookup_helper.infer_country_from_callsign(self.de_call)
if not self.de_continent:
@@ -198,17 +202,17 @@ class Spot:

# Band from frequency
if self.freq and not self.band:
band = lookup_helper.infer_band_from_freq(self.freq)
band = infer_band_from_freq(self.freq)
self.band = band.name

# Mode from comments or bandplan
if self.mode:
self.mode_source = "SPOT"
if self.comment and not self.mode:
self.mode = lookup_helper.infer_mode_from_comment(self.comment)
self.mode = infer_mode_from_comment(self.comment)
self.mode_source = "COMMENT"
if self.freq and not self.mode:
self.mode = lookup_helper.infer_mode_from_frequency(self.freq)
self.mode = infer_mode_from_frequency(self.freq)
self.mode_source = "BANDPLAN"

# Normalise mode if necessary.
@@ -217,7 +221,7 @@ class Spot:

# Mode type from mode
if self.mode and not self.mode_type:
self.mode_type = lookup_helper.infer_mode_type_from_mode(self.mode)
self.mode_type = infer_mode_type_from_mode(self.mode)

# If we have a latitude or grid at this point, it can only have been provided by the spot itself
if self.dx_latitude or self.dx_grid:
@@ -235,7 +239,7 @@ class Spot:
if regex:
all_comment_ref_matches = re.finditer(r"(^|\W)(" + regex + r")(^|\W)", self.comment, re.IGNORECASE)
for ref_match in all_comment_ref_matches:
self.append_sig_ref_if_missing(SIGRef(id=ref_match.group(2).upper(), sig=sig))
self._append_sig_ref_if_missing(SIGRef(id=ref_match.group(2).upper(), sig=sig))

# See if the comment looks like it contains any SIGs (and optionally SIG references) that we can
# add to the spot. This should catch cluster spot comments like "POTA GB-0001 WWFF GFF-0001" and e.g. POTA
@@ -253,9 +257,10 @@ class Spot:
# If so, add that to the sig_refs list for this spot.
ref_regex = get_ref_regex_for_sig(found_sig)
if ref_regex:
ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment, re.IGNORECASE)
ref_matches = re.finditer(r"(^|\W)" + found_sig + r"($|\W)(" + ref_regex + r")($|\W)", self.comment,
re.IGNORECASE)
for ref_match in ref_matches:
self.append_sig_ref_if_missing(SIGRef(id=ref_match.group(3).upper(), sig=found_sig))
self._append_sig_ref_if_missing(SIGRef(id=ref_match.group(3).upper(), sig=found_sig))

# Fetch SIG data. In case a particular API doesn't provide a full set of name, lat, lon & grid for a reference
# in its initial call, we use this code to populate the rest of the data. This includes working out grid refs
@@ -343,12 +348,13 @@ class Spot:
# DX Location is "good" if it is from a spot, or from QRZ if the callsign doesn't contain a slash, so the operator
# is likely at home.
self.dx_location_good = self.dx_latitude and self.dx_longitude and (
self.dx_location_source == "SPOT" or self.dx_location_source == "SIG REF LOOKUP"
or self.dx_location_source == "WAB/WAI GRID"
or (self.dx_location_source == "HOME QTH" and not "/" in self.dx_call))
self.dx_location_source == "SPOT" or self.dx_location_source == "SIG REF LOOKUP"
or self.dx_location_source == "WAB/WAI GRID"
or (self.dx_location_source == "HOME QTH" and not "/" in self.dx_call))

# DE with no digits and APRS servers starting "T2" are not things we can look up location for
if self.de_call and any(char.isdigit() for char in self.de_call) and not (self.de_call.startswith("T2") and self.source == "APRS-IS"):
if self.de_call and any(char.isdigit() for char in self.de_call) and not (
self.de_call.startswith("T2") and self.source == "APRS-IS"):
# DE operator position lookup, using QRZ.com.
if not self.de_latitude:
latlon = lookup_helper.infer_latlon_from_callsign_online_lookup(self.de_call)
@@ -375,12 +381,14 @@ class Spot:
self_copy.received_time_iso = ""
self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest()

# JSON sspoterialise
def to_json(self):
"""JSON serialise"""

return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)

# Append a sig_ref to the list, so long as it's not already there.
def append_sig_ref_if_missing(self, new_sig_ref):
def _append_sig_ref_if_missing(self, new_sig_ref):
"""Append a sig_ref to the list, so long as it's not already there."""

if not self.sig_refs:
self.sig_refs = []
new_sig_ref.id = new_sig_ref.id.strip().upper()
@@ -392,9 +400,10 @@ class Spot:
return
self.sig_refs.append(new_sig_ref)

# Decide if this spot has expired (in which case it should not be added to the system in the first place, and not
# returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
# either having a time further ago than the server's MAX_SPOT_AGE. If it somehow doesn't have a time either, it is
# considered to be expired.
def expired(self):
return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()
"""Decide if this spot has expired (in which case it should not be added to the system in the first place, and not
returned by the web server if later requested, and removed by the cleanup functions). "Expired" is defined as
either having a time further ago than the server's MAX_SPOT_AGE. If it somehow doesn't have a time either, it is
considered to be expired."""

return not self.time or self.time < (datetime.now(pytz.UTC) - timedelta(seconds=MAX_SPOT_AGE)).timestamp()

@@ -8,7 +8,7 @@ import tornado

from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE
from core.constants import UNKNOWN_BAND
from core.lookup_helper import lookup_helper
from core.lookup_helper import infer_band_from_freq
from core.prometheus_metrics_handler import api_requests_counter
from core.sig_utils import get_ref_regex_for_sig
from core.utils import serialize_everything
@@ -16,33 +16,36 @@ from data.sig_ref import SIGRef
from data.spot import Spot


# API request handler for /api/v1/spot (POST)
class APISpotHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/spot (POST)"""

def initialize(self, spots, web_server_metrics):
self.spots = spots
self.web_server_metrics = web_server_metrics
self._spots = spots
self._web_server_metrics = web_server_metrics

def post(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# Reject if not allowed
if not ALLOW_SPOTTING:
self.set_status(401)
self.write(json.dumps("Error - this server does not allow new spots to be added via the API.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return

# Reject if format not json
if 'Content-Type' not in self.request.headers or self.request.headers.get('Content-Type') != "application/json":
if 'Content-Type' not in self.request.headers or self.request.headers.get(
'Content-Type') != "application/json":
self.set_status(415)
self.write(json.dumps("Error - request Content-Type must be application/json", default=serialize_everything))
self.write(
json.dumps("Error - request Content-Type must be application/json", default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return
@@ -72,7 +75,7 @@ class APISpotHandler(tornado.web.RequestHandler):
if not spot.time or not spot.dx_call or not spot.freq or not spot.de_call:
self.set_status(422)
self.write(json.dumps("Error - 'time', 'dx_call', 'freq' and 'de_call' must be provided as a minimum.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return
@@ -81,23 +84,23 @@ class APISpotHandler(tornado.web.RequestHandler):
if not re.match(r"^[A-Za-z0-9/\-]*$", spot.dx_call):
self.set_status(422)
self.write(json.dumps("Error - '" + spot.dx_call + "' does not look like a valid callsign.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return
if not re.match(r"^[A-Za-z0-9/\-]*$", spot.de_call):
self.set_status(422)
self.write(json.dumps("Error - '" + spot.de_call + "' does not look like a valid callsign.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return

# Reject if frequency not in a known band
if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND:
if infer_band_from_freq(spot.freq) == UNKNOWN_BAND:
self.set_status(422)
self.write(json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return
@@ -108,7 +111,7 @@ class APISpotHandler(tornado.web.RequestHandler):
spot.dx_grid.upper()):
self.set_status(422)
self.write(json.dumps("Error - '" + spot.dx_grid + "' does not look like a valid Maidenhead grid.",
default=serialize_everything))
default=serialize_everything))
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
return
@@ -127,7 +130,7 @@ class APISpotHandler(tornado.web.RequestHandler):
# infer missing data, and add it to our database.
spot.source = "API"
spot.infer_missing()
self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
self._spots.add(spot.id, spot, expire=MAX_SPOT_AGE)

self.write(json.dumps("OK", default=serialize_everything))
self.set_status(201)

@@ -14,18 +14,19 @@ SSE_HANDLER_MAX_QUEUE_SIZE = 100
SSE_HANDLER_QUEUE_CHECK_INTERVAL = 5000


# API request handler for /api/v1/alerts
class APIAlertsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/alerts"""

def initialize(self, alerts, web_server_metrics):
self.alerts = alerts
self.web_server_metrics = web_server_metrics
self._alerts = alerts
self._web_server_metrics = web_server_metrics

def get(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -33,7 +34,7 @@ class APIAlertsHandler(tornado.web.RequestHandler):
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

# Fetch all alerts matching the query
data = get_alert_list_with_filters(self.alerts, query_params)
data = get_alert_list_with_filters(self._alerts, query_params)
self.write(json.dumps(data, default=serialize_everything))
self.set_status(200)
except ValueError as e:
@@ -47,78 +48,82 @@ class APIAlertsHandler(tornado.web.RequestHandler):
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")

# API request handler for /api/v1/alerts/stream
class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
def initialize(self, sse_alert_queues, web_server_metrics):
self.sse_alert_queues = sse_alert_queues
self.web_server_metrics = web_server_metrics

# Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data
class APIAlertsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
"""API request handler for /api/v1/alerts/stream"""

def initialize(self, sse_alert_queues, web_server_metrics):
self._sse_alert_queues = sse_alert_queues
self._web_server_metrics = web_server_metrics

def custom_headers(self):
"""Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data"""

return {"Cache-Control": "no-store",
"X-Accel-Buffering": "no"}

def open(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
# reduce that to just the first entry, and convert bytes to string
self.query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
self._query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

# Create a alert queue and add it to the web server's list. The web server will fill this when alerts arrive
self.alert_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self.sse_alert_queues.append(self.alert_queue)
self._alert_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self._sse_alert_queues.append(self._alert_queue)

# Set up a timed callback to check if anything is in the queue
self.heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self.heartbeat.start()
self._heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self._heartbeat.start()

except Exception as e:
logging.warn("Exception when serving SSE socket", e)
logging.warning("Exception when serving SSE socket", e)

# When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it
def close(self):
"""When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it"""

try:
if self.alert_queue in self.sse_alert_queues:
self.sse_alert_queues.remove(self.alert_queue)
empty_queue(self.alert_queue)
if self._alert_queue in self._sse_alert_queues:
self._sse_alert_queues.remove(self._alert_queue)
empty_queue(self._alert_queue)
except:
pass
try:
self.heartbeat.stop()
self._heartbeat.stop()
except:
pass
self.alert_queue = None
self._alert_queue = None
super().close()

# Callback to check if anything has arrived in the queue, and if so send it to the client
def _callback(self):
"""Callback to check if anything has arrived in the queue, and if so send it to the client"""

try:
if self.alert_queue:
while not self.alert_queue.empty():
alert = self.alert_queue.get()
if self._alert_queue:
while not self._alert_queue.empty():
alert = self._alert_queue.get()
# If the new alert matches our param filters, send it to the client. If not, ignore it.
if alert_allowed_by_query(alert, self.query_params):
if alert_allowed_by_query(alert, self._query_params):
self.write_message(msg=json.dumps(alert, default=serialize_everything))

if self.alert_queue not in self.sse_alert_queues:
if self._alert_queue not in self._sse_alert_queues:
logging.error("Web server cleared up a queue of an active connection!")
self.close()
except:
logging.warn("Exception in SSE callback, connection will be closed.")
logging.warning("Exception in SSE callback, connection will be closed.")
self.close()
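
The stream handler above is one side of a simple fan-out: the server pushes each new alert into every connected handler's bounded queue, and each handler drains its own queue on a timer. A simplified standalone sketch; the drop-on-full behaviour shown is an assumption for illustration, not necessarily what the server side does:

```python
from queue import Queue, Full

SSE_HANDLER_MAX_QUEUE_SIZE = 100
sse_alert_queues = []                 # one queue per open connection

def register_connection():
    q = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
    sse_alert_queues.append(q)
    return q

def broadcast(alert):
    for q in sse_alert_queues:
        try:
            q.put_nowait(alert)       # assumed: drop rather than block on a stalled client
        except Full:
            pass

q = register_connection()
broadcast({"dx_call": "M0ABC"})       # illustrative alert payload
print(q.get_nowait())
```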


# Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in
# the main "alerts" GET call.
def get_alert_list_with_filters(all_alerts, query):
"""Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in
the main "alerts" GET call."""

# Create a shallow copy of the alert list ordered by start time, then filter the list to reduce it only to alerts
# that match the filter parameters in the query string. Finally, apply a limit to the number of alerts returned.
# The list of query string filters is defined in the API docs.
@@ -134,9 +139,11 @@ def get_alert_list_with_filters(all_alerts, query):
alerts = alerts[:int(query.get("limit"))]
return alerts

# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list
# of query parameters and their function is defined in the API docs.

def alert_allowed_by_query(alert, query):
"""Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list
of query parameters and their function is defined in the API docs."""

for k in query.keys():
match k:
case "received_since":
@@ -148,8 +155,8 @@ def alert_allowed_by_query(alert, query):
# Check the duration if end_time is provided. If end_time is not provided, assume the activation is
# "short", i.e. it always passes this check. If dxpeditions_skip_max_duration_check is true and
# the alert is a dxpedition, it also always passes the check.
if alert.is_dxpedition and (bool(query.get(
"dxpeditions_skip_max_duration_check")) if "dxpeditions_skip_max_duration_check" in query.keys() else False):
if alert.is_dxpedition and (query.get(
"dxpeditions_skip_max_duration_check").upper() == "TRUE" if "dxpeditions_skip_max_duration_check" in query.keys() else False):
continue
if alert.end_time and alert.start_time and alert.end_time - alert.start_time > max_duration:
return False
|
||||
|
||||
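The boolean-parsing change in `alert_allowed_by_query` above fixes a classic pitfall: `bool()` on any non-empty query-string value is always `True`, so `?dxpeditions_skip_max_duration_check=false` would previously have enabled the skip anyway. A minimal sketch of the difference (the value here is hypothetical):

```
# Why bool() is the wrong way to parse a boolean query parameter:
# any non-empty string is truthy, including "false".
value = "false"

print(bool(value))              # True -- "false" is a non-empty string
print(value.upper() == "TRUE")  # False -- explicit comparison, as the new code does
```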
@@ -5,7 +5,6 @@ from datetime import datetime

import pytz
import tornado
from pyhamtools.locator import locator_to_latlong

from core.constants import SIGS
from core.geo_utils import lat_lon_for_grid_sw_corner_plus_size, lat_lon_to_cq_zone, lat_lon_to_itu_zone
@@ -16,17 +15,18 @@ from data.sig_ref import SIGRef
from data.spot import Spot


# API request handler for /api/v1/lookup/call
class APILookupCallHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/call"""

def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics
self._web_server_metrics = web_server_metrics

def get(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -75,17 +75,18 @@ class APILookupCallHandler(tornado.web.RequestHandler):
self.set_header("Content-Type", "application/json")


# API request handler for /api/v1/lookup/sigref
class APILookupSIGRefHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/sigref"""

def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics
self._web_server_metrics = web_server_metrics

def get(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -96,15 +97,15 @@ class APILookupSIGRefHandler(tornado.web.RequestHandler):
# the provided id must match it.
if "sig" in query_params.keys() and "id" in query_params.keys():
sig = query_params.get("sig").upper()
id = query_params.get("id").upper()
ref_id = query_params.get("id").upper()
if sig in list(map(lambda p: p.name, SIGS)):
if not get_ref_regex_for_sig(sig) or re.match(get_ref_regex_for_sig(sig), id):
data = populate_sig_ref_info(SIGRef(id=id, sig=sig))
if not get_ref_regex_for_sig(sig) or re.match(get_ref_regex_for_sig(sig), ref_id):
data = populate_sig_ref_info(SIGRef(id=ref_id, sig=sig))
self.write(json.dumps(data, default=serialize_everything))

else:
self.write(
json.dumps("Error - '" + id + "' does not look like a valid reference ID for " + sig + ".",
json.dumps("Error - '" + ref_id + "' does not look like a valid reference ID for " + sig + ".",
default=serialize_everything))
self.set_status(422)
else:
@@ -123,18 +124,18 @@ class APILookupSIGRefHandler(tornado.web.RequestHandler):
self.set_header("Content-Type", "application/json")


# API request handler for /api/v1/lookup/grid
class APILookupGridHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/lookup/grid"""

def initialize(self, web_server_metrics):
self.web_server_metrics = web_server_metrics
self._web_server_metrics = web_server_metrics

def get(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -152,17 +153,17 @@ class APILookupGridHandler(tornado.web.RequestHandler):
center_itu_zone = lat_lon_to_itu_zone(center_lat, center_lon)

response = {
"center" : {
"center": {
"latitude": center_lat,
"longitude": center_lon,
"cq_zone": center_cq_zone,
"itu_zone": center_itu_zone
},
"southwest" : {
"southwest": {
"latitude": lat,
"longitude": lon,
},
"northeast" : {
"northeast": {
"latitude": lat + lat_cell_size,
"longitude": lon + lon_cell_size,
}}
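For the grid-lookup response above, the north-east corner is just the south-west corner plus the cell size. A hedged sketch of the same arithmetic, assuming pyhamtools' `locator_to_latlong` returns the square's centre (as the handler's own imports suggest) and using a hypothetical 4-character square, which spans 1 degree of latitude by 2 of longitude:

```
from pyhamtools.locator import locator_to_latlong

grid = "IO90"                        # hypothetical Maidenhead square
lat_cell_size, lon_cell_size = 1.0, 2.0

center_lat, center_lon = locator_to_latlong(grid)  # centre of the square
lat = center_lat - lat_cell_size / 2               # south-west corner
lon = center_lon - lon_cell_size / 2
northeast = (lat + lat_cell_size, lon + lon_cell_size)
print((lat, lon), northeast)
```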
@@ -4,23 +4,24 @@ from datetime import datetime
import pytz
import tornado

from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS
from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING
from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS
from core.prometheus_metrics_handler import api_requests_counter
from core.utils import serialize_everything


# API request handler for /api/v1/options
class APIOptionsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/options"""

def initialize(self, status_data, web_server_metrics):
self.status_data = status_data
self.web_server_metrics = web_server_metrics
self._status_data = status_data
self._web_server_metrics = web_server_metrics

def get(self):
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

options = {"bands": BANDS,
@@ -29,9 +30,9 @@ class APIOptionsHandler(tornado.web.RequestHandler):
"sigs": SIGS,
# Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
"spot_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))),
map(lambda p: p["name"], filter(lambda p: p["enabled"], self._status_data["spot_providers"]))),
"alert_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))),
map(lambda p: p["name"], filter(lambda p: p["enabled"], self._status_data["alert_providers"]))),
"continents": CONTINENTS,
"max_spot_age": MAX_SPOT_AGE,
"spot_allowed": ALLOW_SPOTTING}
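The `map`/`filter` chain in the options handler above is equivalent to a list comprehension; a small sketch with made-up provider entries shaped like the handler's `status_data`:

```
# Hypothetical provider entries, shaped like the status_data used above
providers = [{"name": "POTA", "enabled": True},
             {"name": "SOTA", "enabled": False}]

# map/filter style, as in the handler
names = list(map(lambda p: p["name"], filter(lambda p: p["enabled"], providers)))

# Equivalent list comprehension
names_lc = [p["name"] for p in providers if p["enabled"]]

assert names == names_lc == ["POTA"]
```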
@@ -14,18 +14,19 @@ SSE_HANDLER_MAX_QUEUE_SIZE = 1000
SSE_HANDLER_QUEUE_CHECK_INTERVAL = 5000


# API request handler for /api/v1/spots
class APISpotsHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/spots"""

def initialize(self, spots, web_server_metrics):
self.spots = spots
self.web_server_metrics = web_server_metrics
self._spots = spots
self._web_server_metrics = web_server_metrics

def get(self):
try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
@@ -33,7 +34,7 @@ class APISpotsHandler(tornado.web.RequestHandler):
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

# Fetch all spots matching the query
data = get_spot_list_with_filters(self.spots, query_params)
data = get_spot_list_with_filters(self._spots, query_params)
self.write(json.dumps(data, default=serialize_everything))
self.set_status(200)
except ValueError as e:
@@ -48,78 +49,83 @@ class APISpotsHandler(tornado.web.RequestHandler):
self.set_header("Content-Type", "application/json")


# API request handler for /api/v1/spots/stream
class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
def initialize(self, sse_spot_queues, web_server_metrics):
self.sse_spot_queues = sse_spot_queues
self.web_server_metrics = web_server_metrics
"""API request handler for /api/v1/spots/stream"""

def initialize(self, sse_spot_queues, web_server_metrics):
self._sse_spot_queues = sse_spot_queues
self._web_server_metrics = web_server_metrics

# Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data
def custom_headers(self):
"""Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data"""

return {"Cache-Control": "no-store",
"X-Accel-Buffering": "no"}

# Called once on the client opening a connection, set things up
def open(self):
"""Called once on the client opening a connection, set things up"""

try:
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

# request.arguments contains lists for each param key because technically the client can supply multiple,
# reduce that to just the first entry, and convert bytes to string
self.query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
self._query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

# Create a spot queue and add it to the web server's list. The web server will fill this when spots arrive
self.spot_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self.sse_spot_queues.append(self.spot_queue)
self._spot_queue = Queue(maxsize=SSE_HANDLER_MAX_QUEUE_SIZE)
self._sse_spot_queues.append(self._spot_queue)

# Set up a timed callback to check if anything is in the queue
self.heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self.heartbeat.start()
self._heartbeat = tornado.ioloop.PeriodicCallback(self._callback, SSE_HANDLER_QUEUE_CHECK_INTERVAL)
self._heartbeat.start()

except Exception as e:
logging.warn("Exception when serving SSE socket", e)
logging.warning("Exception when serving SSE socket", e)

# When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it
def close(self):
"""When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it"""

try:
if self.spot_queue in self.sse_spot_queues:
self.sse_spot_queues.remove(self.spot_queue)
empty_queue(self.spot_queue)
if self._spot_queue in self._sse_spot_queues:
self._sse_spot_queues.remove(self._spot_queue)
empty_queue(self._spot_queue)
except:
pass
try:
self.heartbeat.stop()
self._heartbeat.stop()
except:
pass
self.spot_queue = None
self._spot_queue = None
super().close()

# Callback to check if anything has arrived in the queue, and if so send it to the client
def _callback(self):
"""Callback to check if anything has arrived in the queue, and if so send it to the client"""

try:
if self.spot_queue:
while not self.spot_queue.empty():
spot = self.spot_queue.get()
if self._spot_queue:
while not self._spot_queue.empty():
spot = self._spot_queue.get()
# If the new spot matches our param filters, send it to the client. If not, ignore it.
if spot_allowed_by_query(spot, self.query_params):
if spot_allowed_by_query(spot, self._query_params):
self.write_message(msg=json.dumps(spot, default=serialize_everything))

if self.spot_queue not in self.sse_spot_queues:
if self._spot_queue not in self._sse_spot_queues:
logging.error("Web server cleared up a queue of an active connection!")
self.close()
except:
logging.warn("Exception in SSE callback, connection will be closed.")
logging.warning("Exception in SSE callback, connection will be closed.")
self.close()


# Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in
# the main "spots" GET call.
def get_spot_list_with_filters(all_spots, query):
"""Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in
the main "spots" GET call."""

# Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots
# that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned.
# The list of query string filters is defined in the API docs.
@@ -142,22 +148,24 @@ def get_spot_list_with_filters(all_spots, query):
# duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't
# work well for the other views.
if "dedupe" in query.keys():
dedupe = query.get("dedupe").upper() == "TRUE"
if dedupe:
spots_temp = []
already_seen = []
for s in spots:
call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
if call_plus_ssid not in already_seen:
spots_temp.append(s)
already_seen.append(call_plus_ssid)
spots = spots_temp
dedupe = query.get("dedupe").upper() == "TRUE"
if dedupe:
spots_temp = []
already_seen = []
for s in spots:
call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
if call_plus_ssid not in already_seen:
spots_temp.append(s)
already_seen.append(call_plus_ssid)
spots = spots_temp

return spots

# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
# of query parameters and their function is defined in the API docs.

def spot_allowed_by_query(spot, query):
"""Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
of query parameters and their function is defined in the API docs."""

for k in query.keys():
match k:
case "since":
@@ -233,11 +241,11 @@ def spot_allowed_by_query(spot, query):
case "allow_qrt":
# If false, spots that are flagged as QRT are not returned.
prevent_qrt = query.get(k).upper() == "FALSE"
if prevent_qrt and spot.qrt and spot.qrt == True:
if prevent_qrt and spot.qrt:
return False
case "needs_good_location":
# If true, spots require a "good" location to be returned
needs_good_location = query.get(k).upper() == "TRUE"
if needs_good_location and not spot.dx_location_good:
return False
return True
return True
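For anyone consuming the `/api/v1/spots/stream` handler changed above, a minimal hedged client sketch. SSE is just a long-lived HTTP response of `data: ...` lines; the URL and payload shape here are assumptions based on the handler code, not documented guarantees:

```
import json
import requests

# Hypothetical local instance; adjust host/port to your deployment
URL = "http://127.0.0.1:8080/api/v1/spots/stream"

with requests.get(URL, stream=True) as response:
    for line in response.iter_lines(decode_unicode=True):
        # Each event arrives as a "data: <json>" line
        if line and line.startswith("data: "):
            spot = json.loads(line[len("data: "):])
            print(spot)
```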
@@ -8,20 +8,21 @@ from core.prometheus_metrics_handler import api_requests_counter
from core.utils import serialize_everything


# API request handler for /api/v1/status
class APIStatusHandler(tornado.web.RequestHandler):
"""API request handler for /api/v1/status"""

def initialize(self, status_data, web_server_metrics):
self.status_data = status_data
self.web_server_metrics = web_server_metrics
self._status_data = status_data
self._web_server_metrics = web_server_metrics

def get(self):
# Metrics
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["api_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["api_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
api_requests_counter.inc()

self.write(json.dumps(self.status_data, default=serialize_everything))
self.write(json.dumps(self._status_data, default=serialize_everything))
self.set_status(200)
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
@@ -4,8 +4,9 @@ from prometheus_client import CONTENT_TYPE_LATEST
from core.prometheus_metrics_handler import get_metrics


# Handler for Prometheus metrics endpoint
class PrometheusMetricsHandler(tornado.web.RequestHandler):
"""Handler for Prometheus metrics endpoint"""

def get(self):
self.write(get_metrics())
self.set_status(200)
@@ -3,25 +3,25 @@ from datetime import datetime
import pytz
import tornado

from core.config import ALLOW_SPOTTING, WEB_UI_OPTIONS
from core.config import ALLOW_SPOTTING, WEB_UI_OPTIONS, BASE_URL
from core.constants import SOFTWARE_VERSION
from core.prometheus_metrics_handler import page_requests_counter


# Handler for all HTML pages generated from templates
class PageTemplateHandler(tornado.web.RequestHandler):
"""Handler for all HTML pages generated from templates"""

def initialize(self, template_name, web_server_metrics):
self.template_name = template_name
self.web_server_metrics = web_server_metrics
self._template_name = template_name
self._web_server_metrics = web_server_metrics

def get(self):
# Metrics
self.web_server_metrics["last_page_access_time"] = datetime.now(pytz.UTC)
self.web_server_metrics["page_access_counter"] += 1
self.web_server_metrics["status"] = "OK"
self._web_server_metrics["last_page_access_time"] = datetime.now(pytz.UTC)
self._web_server_metrics["page_access_counter"] += 1
self._web_server_metrics["status"] = "OK"
page_requests_counter.inc()

# Load named template, and provide variables used in templates
self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING,
web_ui_options=WEB_UI_OPTIONS)

self.render(self._template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING,
web_ui_options=WEB_UI_OPTIONS, baseurl=BASE_URL, current_path=self.request.path)
@@ -16,17 +16,19 @@ from server.handlers.metrics import PrometheusMetricsHandler
from server.handlers.pagetemplate import PageTemplateHandler


# Provides the public-facing web server.
class WebServer:
# Constructor
"""Provides the public-facing web server."""

def __init__(self, spots, alerts, status_data, port):
self.spots = spots
self.alerts = alerts
self.sse_spot_queues = []
self.sse_alert_queues = []
self.status_data = status_data
self.port = port
self.shutdown_event = asyncio.Event()
"""Constructor"""

self._spots = spots
self._alerts = alerts
self._sse_spot_queues = []
self._sse_alert_queues = []
self._status_data = status_data
self._port = port
self._shutdown_event = asyncio.Event()
self.web_server_metrics = {
"last_page_access_time": None,
"last_api_access_time": None,
@@ -35,37 +37,49 @@ class WebServer:
"status": "Starting"
}

# Start the web server
def start(self):
asyncio.run(self.start_inner())
"""Start the web server"""

asyncio.run(self._start_inner())

# Stop the web server
def stop(self):
self.shutdown_event.set()
"""Stop the web server"""

self._shutdown_event.set()

async def _start_inner(self):
"""Start method (async). Sets up the Tornado application."""

# Start method (async). Sets up the Tornado application.
async def start_inner(self):
app = tornado.web.Application([
# Routes for API calls
(r"/api/v1/spots", APISpotsHandler, {"spots": self.spots, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts", APIAlertsHandler, {"alerts": self.alerts, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spots/stream", APISpotsStreamHandler, {"sse_spot_queues": self.sse_spot_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts/stream", APIAlertsStreamHandler, {"sse_alert_queues": self.sse_alert_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/options", APIOptionsHandler, {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/status", APIStatusHandler, {"status_data": self.status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spots", APISpotsHandler, {"spots": self._spots, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts", APIAlertsHandler,
{"alerts": self._alerts, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spots/stream", APISpotsStreamHandler,
{"sse_spot_queues": self._sse_spot_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/alerts/stream", APIAlertsStreamHandler,
{"sse_alert_queues": self._sse_alert_queues, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/options", APIOptionsHandler,
{"status_data": self._status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/status", APIStatusHandler,
{"status_data": self._status_data, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/call", APILookupCallHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/sigref", APILookupSIGRefHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/lookup/grid", APILookupGridHandler, {"web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spot", APISpotHandler, {"spots": self.spots, "web_server_metrics": self.web_server_metrics}),
(r"/api/v1/spot", APISpotHandler, {"spots": self._spots, "web_server_metrics": self.web_server_metrics}),
# Routes for templated pages
(r"/", PageTemplateHandler, {"template_name": "spots", "web_server_metrics": self.web_server_metrics}),
(r"/map", PageTemplateHandler, {"template_name": "map", "web_server_metrics": self.web_server_metrics}),
(r"/bands", PageTemplateHandler, {"template_name": "bands", "web_server_metrics": self.web_server_metrics}),
(r"/alerts", PageTemplateHandler, {"template_name": "alerts", "web_server_metrics": self.web_server_metrics}),
(r"/add-spot", PageTemplateHandler, {"template_name": "add_spot", "web_server_metrics": self.web_server_metrics}),
(r"/status", PageTemplateHandler, {"template_name": "status", "web_server_metrics": self.web_server_metrics}),
(r"/alerts", PageTemplateHandler,
{"template_name": "alerts", "web_server_metrics": self.web_server_metrics}),
(r"/add-spot", PageTemplateHandler,
{"template_name": "add_spot", "web_server_metrics": self.web_server_metrics}),
(r"/status", PageTemplateHandler,
{"template_name": "status", "web_server_metrics": self.web_server_metrics}),
(r"/about", PageTemplateHandler, {"template_name": "about", "web_server_metrics": self.web_server_metrics}),
(r"/apidocs", PageTemplateHandler, {"template_name": "apidocs", "web_server_metrics": self.web_server_metrics}),
(r"/apidocs", PageTemplateHandler,
{"template_name": "apidocs", "web_server_metrics": self.web_server_metrics}),
# Route for Prometheus metrics
(r"/metrics", PrometheusMetricsHandler),
# Default route to serve from "webassets"
@@ -73,13 +87,14 @@ class WebServer:
],
template_path=os.path.join(os.path.dirname(__file__), "../templates"),
debug=False)
app.listen(self.port)
await self.shutdown_event.wait()
app.listen(self._port)
await self._shutdown_event.wait()

# Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are
# awaiting a server-sent message with new spots.
def notify_new_spot(self, spot):
for queue in self.sse_spot_queues:
"""Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are
awaiting a server-sent message with new spots."""

for queue in self._sse_spot_queues:
try:
queue.put(spot)
except:
@@ -87,10 +102,11 @@ class WebServer:
pass
pass

# Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are
# awaiting a server-sent message with new spots.
def notify_new_alert(self, alert):
for queue in self.sse_alert_queues:
"""Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are
awaiting a server-sent message with new alerts."""

for queue in self._sse_alert_queues:
try:
queue.put(alert)
except:
@@ -98,23 +114,26 @@ class WebServer:
pass
pass

# Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it
# properly for some reason.
def clean_up_sse_queues(self):
for q in self.sse_spot_queues:
"""Clean up any SSE queues that are growing too large; probably their client disconnected and we didn't catch it
properly for some reason."""

for q in self._sse_spot_queues:
try:
if q.full():
logging.warn("A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.")
self.sse_spot_queues.remove(q)
logging.warning(
"A full SSE spot queue was found, presumably because the client disconnected strangely. It has been removed.")
self._sse_spot_queues.remove(q)
empty_queue(q)
except:
# Probably got deleted already on another thread
pass
for q in self.sse_alert_queues:
for q in self._sse_alert_queues:
try:
if q.full():
logging.warn("A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.")
self.sse_alert_queues.remove(q)
logging.warning(
"A full SSE alert queue was found, presumably because the client disconnected strangely. It has been removed.")
self._sse_alert_queues.remove(q)
empty_queue(q)
except:
# Probably got deleted already on another thread
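The `clean_up_sse_queues` logic above relies on bounded queues reporting full once a disconnected client stops draining them. A small sketch of the behaviour it depends on:

```
from queue import Queue

# A bounded queue, as created per SSE client (maxsize 1000 in the real code)
q = Queue(maxsize=2)
q.put("spot 1")
q.put("spot 2")

# Once nothing drains the queue, it fills up and full() becomes True --
# the periodic cleanup pass uses this to detect dead connections
print(q.full())  # True
```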
spothole.py
@@ -25,18 +25,19 @@ cleanup_timer = None
run = True


# Shutdown function
def shutdown(sig, frame):
"""Shutdown function"""

global run

logging.info("Stopping program...")
web_server.stop()
for p in spot_providers:
if p.enabled:
p.stop()
for p in alert_providers:
if p.enabled:
p.stop()
for sp in spot_providers:
if sp.enabled:
sp.stop()
for ap in alert_providers:
if ap.enabled:
ap.stop()
cleanup_timer.stop()
lookup_helper.stop()
spots.close()
@@ -44,15 +45,17 @@ def shutdown(sig, frame):
os._exit(0)


# Utility method to get a spot provider based on the class specified in its config entry.
def get_spot_provider_from_config(config_providers_entry):
"""Utility method to get a spot provider based on the class specified in its config entry."""

module = importlib.import_module('spotproviders.' + config_providers_entry["class"].lower())
provider_class = getattr(module, config_providers_entry["class"])
return provider_class(config_providers_entry)


# Utility method to get an alert provider based on the class specified in its config entry.
def get_alert_provider_from_config(config_providers_entry):
"""Utility method to get an alert provider based on the class specified in its config entry."""

module = importlib.import_module('alertproviders.' + config_providers_entry["class"].lower())
provider_class = getattr(module, config_providers_entry["class"])
return provider_class(config_providers_entry)
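The provider factories above use the standard dynamic-import pattern: the config names a class, `importlib` loads the matching module, and `getattr` pulls the class out of it. A self-contained sketch of the same idea (the module and class names here are hypothetical, mirroring the repo's own convention):

```
import importlib

def provider_from_config(entry):
    # e.g. entry = {"class": "POTA"} loads the module spotproviders.pota
    # and instantiates the POTA class found inside it
    module = importlib.import_module("spotproviders." + entry["class"].lower())
    provider_class = getattr(module, entry["class"])
    return provider_class(entry)
```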
@@ -10,32 +10,32 @@ from data.spot import Spot
from spotproviders.spot_provider import SpotProvider


# Spot provider for the APRS-IS.
class APRSIS(SpotProvider):
"""Spot provider for the APRS-IS."""

def __init__(self, provider_config):
super().__init__(provider_config)
self.thread = Thread(target=self.connect)
self.thread.daemon = True
self.aprsis = None
self._thread = Thread(target=self._connect)
self._thread.daemon = True
self._aprsis = None

def start(self):
self.thread.start()
self._thread.start()

def connect(self):
self.aprsis = aprslib.IS(SERVER_OWNER_CALLSIGN)
def _connect(self):
self._aprsis = aprslib.IS(SERVER_OWNER_CALLSIGN)
self.status = "Connecting"
logging.info("APRS-IS connecting...")
self.aprsis.connect()
self.aprsis.consumer(self.handle)
self._aprsis.connect()
self._aprsis.consumer(self._handle)
logging.info("APRS-IS connected.")

def stop(self):
self.status = "Shutting down"
self.aprsis.close()
self.thread.join()
self._aprsis.close()
self._thread.join()

def handle(self, data):
def _handle(self, data):
# Split SSID in "from" call and store separately
from_parts = data["from"].upper().split("-")
dx_call = from_parts[0]
@@ -51,11 +51,12 @@ class APRSIS(SpotProvider):
comment=data["comment"] if "comment" in data else None,
dx_latitude=data["latitude"] if "latitude" in data else None,
dx_longitude=data["longitude"] if "longitude" in data else None,
time=datetime.now(pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now"
time=datetime.now(
pytz.UTC).timestamp()) # APRS-IS spots are live so we can assume spot time is "now"

# Add to our list
self.submit(spot)
self._submit(spot)

self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Data received from APRS-IS.")
logging.debug("Data received from APRS-IS.")
@@ -12,66 +12,67 @@ from data.spot import Spot
from spotproviders.spot_provider import SpotProvider


# Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config.
# See config-example.yml for examples.
class DXCluster(SpotProvider):
CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
FREQUENCY_PATTERN = "([0-9|.]+)"
LINE_PATTERN_EXCLUDE_RBN = re.compile(
"^DX de " + CALLSIGN_PATTERN + ":\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
"""Spot provider for a DX Cluster. Hostname, port, login_prompt, login_callsign and allow_rbn_spots are provided in config.
See config-example.yml for examples."""

_LINE_PATTERN_EXCLUDE_RBN = re.compile(
r"^DX de ([a-z0-9/]+):\s+([0-9.]+)\s+([a-z0-9/]+)\s+(.*)\s+(\d{4}Z)",
re.IGNORECASE)
LINE_PATTERN_ALLOW_RBN = re.compile(
"^DX de " + CALLSIGN_PATTERN + "-?#?:\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
_LINE_PATTERN_ALLOW_RBN = re.compile(
r"^DX de ([a-z0-9/]+)-?#?:\s+([0-9.]+)\s+([a-z0-9/]+)\s+(.*)\s+(\d{4}Z)",
re.IGNORECASE)

# Constructor requires hostname and port
def __init__(self, provider_config):
"""Constructor requires hostname and port"""

super().__init__(provider_config)
self.hostname = provider_config["host"]
self.port = provider_config["port"]
self.login_prompt = provider_config["login_prompt"] if "login_prompt" in provider_config else "login:"
self.login_callsign = provider_config["login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN
self.allow_rbn_spots = provider_config["allow_rbn_spots"] if "allow_rbn_spots" in provider_config else False
self.spot_line_pattern = self.LINE_PATTERN_ALLOW_RBN if self.allow_rbn_spots else self.LINE_PATTERN_EXCLUDE_RBN
self.telnet = None
self.thread = Thread(target=self.handle)
self.thread.daemon = True
self.run = True
self._hostname = provider_config["host"]
self._port = provider_config["port"]
self._login_prompt = provider_config["login_prompt"] if "login_prompt" in provider_config else "login:"
self._login_callsign = provider_config[
"login_callsign"] if "login_callsign" in provider_config else SERVER_OWNER_CALLSIGN
self._allow_rbn_spots = provider_config["allow_rbn_spots"] if "allow_rbn_spots" in provider_config else False
self._spot_line_pattern = self._LINE_PATTERN_ALLOW_RBN if self._allow_rbn_spots else self._LINE_PATTERN_EXCLUDE_RBN
self._telnet = None
self._thread = Thread(target=self._handle)
self._thread.daemon = True
self._running = True

def start(self):
self.thread.start()
self._thread.start()

def stop(self):
self.run = False
self.telnet.close()
self.thread.join()
self._running = False
self._telnet.close()
self._thread.join()

def handle(self):
while self.run:
def _handle(self):
while self._running:
connected = False
while not connected and self.run:
while not connected and self._running:
try:
self.status = "Connecting"
logging.info("DX Cluster " + self.hostname + " connecting...")
self.telnet = telnetlib3.Telnet(self.hostname, self.port)
self.telnet.read_until(self.login_prompt.encode("latin-1"))
self.telnet.write((self.login_callsign + "\n").encode("latin-1"))
logging.info("DX Cluster " + self._hostname + " connecting...")
self._telnet = telnetlib3.Telnet(self._hostname, self._port)
self._telnet.read_until(self._login_prompt.encode("latin-1"))
self._telnet.write((self._login_callsign + "\n").encode("latin-1"))
connected = True
logging.info("DX Cluster " + self.hostname + " connected.")
except Exception as e:
logging.info("DX Cluster " + self._hostname + " connected.")
except Exception:
self.status = "Error"
logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").")
logging.exception("Exception while connecting to DX Cluster Provider (" + self._hostname + ").")
sleep(5)

self.status = "Waiting for Data"
while connected and self.run:
while connected and self._running:
try:
# Check new telnet info against regular expression
telnet_output = self.telnet.read_until("\n".encode("latin-1"))
match = self.spot_line_pattern.match(telnet_output.decode("latin-1"))
telnet_output = self._telnet.read_until("\n".encode("latin-1"))
match = self._spot_line_pattern.match(telnet_output.decode("latin-1"))
if match:
spot_time = datetime.strptime(match.group(5), "%H%MZ")
spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
spot_datetime = datetime.combine(datetime.now(pytz.UTC).date(), spot_time.time(), tzinfo=pytz.UTC)
spot = Spot(source=self.name,
dx_call=match.group(3),
de_call=match.group(1),
@@ -80,20 +81,20 @@ class DXCluster(SpotProvider):
time=spot_datetime.timestamp())

# Add to our list
self.submit(spot)
self._submit(spot)

self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Data received from DX Cluster " + self.hostname + ".")
logging.debug("Data received from DX Cluster " + self._hostname + ".")

except Exception as e:
except Exception:
connected = False
if self.run:
if self._running:
self.status = "Error"
logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")")
logging.exception("Exception in DX Cluster Provider (" + self._hostname + ")")
sleep(5)
else:
logging.info("DX Cluster " + self.hostname + " shutting down...")
logging.info("DX Cluster " + self._hostname + " shutting down...")
self.status = "Shutting down"

self.status = "Disconnected"
self.status = "Disconnected"
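The timestamp change above (`datetime.today()` to `datetime.now(pytz.UTC).date()`) matters because cluster spot times are in UTC: around midnight, the server's local date can differ from the UTC date, stamping a spot a day off. A quick illustration of the two approaches:

```
from datetime import datetime
import pytz

spot_time = datetime.strptime("2359Z", "%H%MZ")

# Old approach: take the local date, then bolt UTC on afterwards --
# wrong whenever the local date differs from the UTC date
old = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)

# New approach: take the date in UTC to begin with
new = datetime.combine(datetime.now(pytz.UTC).date(), spot_time.time(), tzinfo=pytz.UTC)

# On a server west of UTC just after 00:00 UTC, these differ by a day
print(old.date(), new.date())
```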
@@ -10,8 +10,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for General Mountain Activity
class GMA(HTTPSpotProvider):
"""Spot provider for General Mountain Activity"""

POLL_INTERVAL_SEC = 120
SPOTS_URL = "https://www.cqgma.org/api/spots/25/"
# GMA spots don't contain the details of the programme they are for, we need a separate lookup for that
@@ -20,7 +21,7 @@ class GMA(HTTPSpotProvider):
def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

def http_response_to_spots(self, http_response):
def _http_response_to_spots(self, http_response):
new_spots = []
# Iterate through source data
for source_spot in http_response.json()["RCD"]:
@@ -36,9 +37,11 @@ class GMA(HTTPSpotProvider):
sig_refs=[SIGRef(id=source_spot["REF"], sig="", name=source_spot["NAME"])],
time=datetime.strptime(source_spot["DATE"] + source_spot["TIME"], "%Y%m%d%H%M").replace(
tzinfo=pytz.UTC).timestamp(),
dx_latitude=float(source_spot["LAT"]) if (source_spot["LAT"] and source_spot["LAT"] != "") else None,
dx_latitude=float(source_spot["LAT"]) if (
source_spot["LAT"] and source_spot["LAT"] != "") else None,
# Seen GMA spots with no (or empty) lat/lon
dx_longitude=float(source_spot["LON"]) if (source_spot["LON"] and source_spot["LON"] != "") else None)
dx_longitude=float(source_spot["LON"]) if (
source_spot["LON"] and source_spot["LON"] != "") else None)

# GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
if "REF" in source_spot:
@@ -74,7 +77,7 @@ class GMA(HTTPSpotProvider):
spot.sig_refs[0].sig = "MOTA"
spot.sig = "MOTA"
case _:
logging.warn("GMA spot found with ref type " + ref_info[
logging.warning("GMA spot found with ref type " + ref_info[
"reftype"] + ", developer needs to add support for this!")
spot.sig_refs[0].sig = ref_info["reftype"]
spot.sig = ref_info["reftype"]
@@ -83,5 +86,6 @@ class GMA(HTTPSpotProvider):
# that for us.
new_spots.append(spot)
except:
logging.warn("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot["REF"] + ", ignoring this spot for now")
logging.warning("Exception when looking up " + self.REF_INFO_URL_ROOT + source_spot[
"REF"] + ", ignoring this spot for now")
return new_spots
@@ -10,8 +10,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for HuMPs Excluding Marilyns Award
class HEMA(HTTPSpotProvider):
"""Spot provider for HuMPs Excluding Marilyns Award"""

POLL_INTERVAL_SEC = 300
# HEMA wants us to check for a "spot seed" from the API and see if it's actually changed before querying the main
# data API. So it's actually the SPOT_SEED_URL that we pass into the constructor and get the superclass to call on a
@@ -23,13 +24,13 @@ class HEMA(HTTPSpotProvider):

def __init__(self, provider_config):
super().__init__(provider_config, self.SPOT_SEED_URL, self.POLL_INTERVAL_SEC)
self.spot_seed = ""
self._spot_seed = ""

def http_response_to_spots(self, http_response):
def _http_response_to_spots(self, http_response):
# OK, source data is actually just the spot seed at this point. We'll then go on to fetch real data if we know
# this has changed.
spot_seed_changed = http_response.text != self.spot_seed
self.spot_seed = http_response.text
spot_seed_changed = http_response.text != self._spot_seed
self._spot_seed = http_response.text

new_spots = []
# OK, if the spot seed actually changed, now we make the real request for data.
@@ -54,11 +55,12 @@ class HEMA(HTTPSpotProvider):
comment=spotter_comment_match.group(2),
sig="HEMA",
sig_refs=[SIGRef(id=spot_items[3].upper(), sig="HEMA", name=spot_items[4])],
time=datetime.strptime(spot_items[0], "%d/%m/%Y %H:%M").replace(tzinfo=pytz.UTC).timestamp(),
time=datetime.strptime(spot_items[0], "%d/%m/%Y %H:%M").replace(
tzinfo=pytz.UTC).timestamp(),
dx_latitude=float(spot_items[7]),
dx_longitude=float(spot_items[8]))

# Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
# that for us.
new_spots.append(spot)
return new_spots
return new_spots
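HEMA's "spot seed" check above is a change-token pattern: poll a cheap endpoint, and only hit the expensive one when the token moves. A generic sketch of the idea (the class and URLs here are hypothetical, not part of the repo):

```
import requests

class SeededPoller:
    def __init__(self, seed_url, data_url):
        self._seed_url = seed_url   # cheap endpoint returning a change token
        self._data_url = data_url   # expensive endpoint with the real data
        self._last_seed = ""

    def poll(self):
        seed = requests.get(self._seed_url).text
        if seed == self._last_seed:
            return None             # nothing changed, skip the big fetch
        self._last_seed = seed
        return requests.get(self._data_url)
```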
@@ -9,20 +9,21 @@ from core.constants import HTTP_HEADERS
from spotproviders.spot_provider import SpotProvider


# Generic spot provider class for providers that request data via HTTP(S). Just for convenience to avoid code
# duplication. Subclasses of this query the individual APIs for data.
class HTTPSpotProvider(SpotProvider):
"""Generic spot provider class for providers that request data via HTTP(S). Just for convenience to avoid code
duplication. Subclasses of this query the individual APIs for data."""

def __init__(self, provider_config, url, poll_interval):
super().__init__(provider_config)
self.url = url
self.poll_interval = poll_interval
self._url = url
self._poll_interval = poll_interval
self._thread = None
self._stop_event = Event()

def start(self):
# Fire off the polling thread. It will poll immediately on startup, then sleep for poll_interval between
# subsequent polls, so start() returns immediately and the application can continue starting.
logging.info("Set up query of " + self.name + " spot API every " + str(self.poll_interval) + " seconds.")
logging.info("Set up query of " + self.name + " spot API every " + str(self._poll_interval) + " seconds.")
self._thread = Thread(target=self._run, daemon=True)
self._thread.start()

@@ -32,31 +33,32 @@ class HTTPSpotProvider(SpotProvider):
def _run(self):
while True:
self._poll()
if self._stop_event.wait(timeout=self.poll_interval):
if self._stop_event.wait(timeout=self._poll_interval):
break

def _poll(self):
try:
# Request data from API
logging.debug("Polling " + self.name + " spot API...")
http_response = requests.get(self.url, headers=HTTP_HEADERS)
http_response = requests.get(self._url, headers=HTTP_HEADERS)
# Pass off to the subclass for processing
new_spots = self.http_response_to_spots(http_response)
new_spots = self._http_response_to_spots(http_response)
# Submit the new spots for processing. There might not be any spots for the less popular programs.
if new_spots:
self.submit_batch(new_spots)
self._submit_batch(new_spots)

self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC)
logging.debug("Received data from " + self.name + " spot API.")

except Exception as e:
except Exception:
self.status = "Error"
logging.exception("Exception in HTTP JSON Spot Provider (" + self.name + ")")
self._stop_event.wait(timeout=1)

# Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass
# implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
# the API actually provides.
def http_response_to_spots(self, http_response):
raise NotImplementedError("Subclasses must implement this method")
def _http_response_to_spots(self, http_response):
"""Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass
implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
the API actually provides."""

raise NotImplementedError("Subclasses must implement this method")
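The polling loop above uses `Event.wait(timeout=...)` rather than `time.sleep()`, which makes the sleep interruptible: `wait` returns `True` the moment the event is set, so `stop()` takes effect immediately instead of after the remainder of the interval. A minimal demonstration:

```
from threading import Event, Thread, Timer

stop_event = Event()

def run():
    while True:
        print("poll")
        # Sleeps up to 120 s, but returns True the moment stop_event is set
        if stop_event.wait(timeout=120):
            break
    print("stopped promptly")

Timer(0.5, stop_event.set).start()  # simulate stop() arriving mid-sleep
t = Thread(target=run)
t.start()
t.join()
```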
@@ -5,15 +5,16 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Lagos y Lagunas On the Air
class LLOTA(HTTPSpotProvider):
"""Spot provider for Lagos y Lagunas On the Air"""

POLL_INTERVAL_SEC = 120
SPOTS_URL = "https://llota.app/api/public/spots"

def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

def http_response_to_spots(self, http_response):
def _http_response_to_spots(self, http_response):
new_spots = []
# Iterate through source data
for source_spot in http_response.json():
@@ -38,4 +39,4 @@ class LLOTA(HTTPSpotProvider):
# Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
# that for us.
new_spots.append(spot)
return new_spots
return new_spots
@@ -9,8 +9,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Parks n Peaks
class ParksNPeaks(HTTPSpotProvider):
"""Spot provider for Parks n Peaks"""

POLL_INTERVAL_SEC = 120
SPOTS_URL = "https://www.parksnpeaks.org/api/ALL"
SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv"
@@ -18,7 +19,7 @@ class ParksNPeaks(HTTPSpotProvider):
def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

def http_response_to_spots(self, http_response):
def _http_response_to_spots(self, http_response):
new_spots = []
# Iterate through source data
for source_spot in http_response.json():
@@ -26,32 +27,37 @@ class ParksNPeaks(HTTPSpotProvider):
spot = Spot(source=self.name,
source_id=source_spot["actID"],
dx_call=source_spot["actCallsign"].upper(),
de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None, # typo exists in API
de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None,
# typo exists in API
freq=float(source_spot["actFreq"].replace(",", "")) * 1000000 if (
source_spot["actFreq"] != "") else None,
source_spot["actFreq"] != "") else None,
# Seen PNP spots with empty frequency, and with comma-separated thousands digits
mode=source_spot["actMode"].upper(),
comment=source_spot["actComments"],
sig=source_spot["actClass"].upper(),
sig_refs=[SIGRef(id=source_spot["actSiteID"], sig=source_spot["actClass"].upper())],
time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(
tzinfo=pytz.UTC).timestamp())

# Free text location is not present in all spots, so only add it if it's set
if "actLocation" in source_spot and source_spot["actLocation"] != "":
spot.sig_refs[0].name = source_spot["actLocation"]

# Extract a de_call if it's in the comment but not in the "actSpoter" field
m = re.search(r"\(de ([A-Za-z0-9]*)\)", spot.comment)
if not spot.de_call and m:
spot.de_call = m.group(1)

# Log a warning for the developer if PnP gives us an unknown programme we've never seen before
if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]:
logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to add support for this!")
# Record SIG information. Sometimes we get a "SIG" of "QRP", which we ignore as it's not a programme with a
# defined set of references
sig = source_spot["actClass"].upper()
sig_ref = source_spot["actSiteID"]
if sig and sig != "" and sig != "QRP" and sig_ref and sig_ref != "":
spot.sig = sig
spot.sig_refs = [SIGRef(id=source_spot["actSiteID"], sig=source_spot["actClass"].upper())]

# If this is POTA, SOTA, WWFF or ZLOTA data we already have it through other means, so ignore. Otherwise,
# add to the spot list.
if spot.sig_refs[0].sig not in ["POTA", "SOTA", "WWFF", "ZLOTA"]:
new_spots.append(spot)
# Free text location is not present in all spots, so only add it if it's set
if "actLocation" in source_spot and source_spot["actLocation"] != "":
spot.sig_refs[0].name = source_spot["actLocation"]

# Log a warning for the developer if PnP gives us an unknown programme we've never seen before
if sig not in ["POTA", "SOTA", "WWFF", "SIOTA", "ZLOTA", "KRMNPA"]:
logging.warning("PNP spot found with sig " + sig + ", developer needs to add support for this!")

# Add new spot to the list
new_spots.append(spot)
return new_spots
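The `(de CALL)` comment parsing above can be checked in isolation; a quick sketch with a made-up ParksNPeaks comment:

```
import re

comment = "QRV 40m SSB (de VK3ABC)"  # hypothetical PnP comment text

m = re.search(r"\(de ([A-Za-z0-9]*)\)", comment)
if m:
    print(m.group(1))  # VK3ABC -- used as the spotter when actSpoter is empty
```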
@@ -7,15 +7,16 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Parks on the Air
class POTA(HTTPSpotProvider):
"""Spot provider for Parks on the Air"""

POLL_INTERVAL_SEC = 120
SPOTS_URL = "https://api.pota.app/spot/activator"

def __init__(self, provider_config):
super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

def http_response_to_spots(self, http_response):
def _http_response_to_spots(self, http_response):
new_spots = []
# Iterate through source data
for source_spot in http_response.json():
@@ -12,59 +12,58 @@ from data.spot import Spot
|
||||
from spotproviders.spot_provider import SpotProvider


# Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8
# (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor.
class RBN(SpotProvider):
    CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
    FREQUENCY_PATTERM = "([0-9|.]+)"
    LINE_PATTERN = re.compile(
        "^DX de " + CALLSIGN_PATTERN + "-.*:\\s+" + FREQUENCY_PATTERM + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
    """Spot provider for the Reverse Beacon Network. Connects to a single port, if you want both CW/RTTY (port 7000) and FT8
    (port 7001) you need to instantiate two copies of this. The port is provided as an argument to the constructor."""

    _LINE_PATTERN = re.compile(
        r"^DX de ([a-z0-9/]+)-.*:\s+([0-9.]+)\s+([a-z0-9/]+)\s+(.*)\s+(\d{4}Z)",
        re.IGNORECASE)

    # Constructor requires port number.
    def __init__(self, provider_config):
        super().__init__(provider_config)
        self.port = provider_config["port"]
        self.telnet = None
        self.thread = Thread(target=self.handle)
        self.thread.daemon = True
        self.run = True
        """Constructor requires port number."""

        super().__init__(provider_config)
        self._port = provider_config["port"]
        self._telnet = None
        self._thread = Thread(target=self._handle)
        self._thread.daemon = True
        self._running = True

    def start(self):
        self.thread.start()
        self._thread.start()

    def stop(self):
        self.run = False
        self.telnet.close()
        self.thread.join()
        self._running = False
        self._telnet.close()
        self._thread.join()

    def handle(self):
        while self.run:
    def _handle(self):
        while self._running:
            connected = False
            while not connected and self.run:
            while not connected and self._running:
                try:
                    self.status = "Connecting"
                    logging.info("RBN port " + str(self.port) + " connecting...")
                    self.telnet = telnetlib3.Telnet("telnet.reversebeacon.net", self.port)
                    telnet_output = self.telnet.read_until("Please enter your call: ".encode("latin-1"))
                    self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("latin-1"))
                    logging.info("RBN port " + str(self._port) + " connecting...")
                    self._telnet = telnetlib3.Telnet("telnet.reversebeacon.net", self._port)
                    telnet_output = self._telnet.read_until("Please enter your call: ".encode("latin-1"))
                    self._telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("latin-1"))
                    connected = True
                    logging.info("RBN port " + str(self.port) + " connected.")
                except Exception as e:
                    logging.info("RBN port " + str(self._port) + " connected.")
                except Exception:
                    self.status = "Error"
                    logging.exception("Exception while connecting to RBN (port " + str(self.port) + ").")
                    logging.exception("Exception while connecting to RBN (port " + str(self._port) + ").")
                    sleep(5)

            self.status = "Waiting for Data"
            while connected and self.run:
            while connected and self._running:
                try:
                    # Check new telnet info against regular expression
                    telnet_output = self.telnet.read_until("\n".encode("latin-1"))
                    match = self.LINE_PATTERN.match(telnet_output.decode("latin-1"))
                    telnet_output = self._telnet.read_until("\n".encode("latin-1"))
                    match = self._LINE_PATTERN.match(telnet_output.decode("latin-1"))
                    if match:
                        spot_time = datetime.strptime(match.group(5), "%H%MZ")
                        spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
                        spot_datetime = datetime.combine(datetime.now(pytz.UTC).date(), spot_time.time(), tzinfo=pytz.UTC)
                        spot = Spot(source=self.name,
                                    dx_call=match.group(3),
                                    de_call=match.group(1),
@@ -73,20 +72,20 @@ class RBN(SpotProvider):
                                    time=spot_datetime.timestamp())

                        # Add to our list
                        self.submit(spot)
                        self._submit(spot)

                    self.status = "OK"
                    self.last_update_time = datetime.now(pytz.UTC)
                    logging.debug("Data received from RBN on port " + str(self.port) + ".")
                    logging.debug("Data received from RBN on port " + str(self._port) + ".")

                except Exception as e:
                except Exception:
                    connected = False
                    if self.run:
                    if self._running:
                        self.status = "Error"
                        logging.exception("Exception in RBN provider (port " + str(self.port) + ")")
                        logging.exception("Exception in RBN provider (port " + str(self._port) + ")")
                        sleep(5)
                    else:
                        logging.info("RBN provider (port " + str(self.port) + ") shutting down...")
                        logging.info("RBN provider (port " + str(self._port) + ") shutting down...")
                        self.status = "Shutting down"

            self.status = "Disconnected"
            self.status = "Disconnected"
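
The comment above notes that RBN's CW/RTTY and FT8 streams live on different telnet ports, so one provider instance handles exactly one port. A minimal sketch of wiring up both streams (the config keys match those read in `__init__`; the module path is an assumption):

```python
# Hypothetical wiring: one RBN provider per telnet port.
from spotproviders.rbn import RBN  # assumed module path

cw_rtty = RBN({"name": "RBN CW/RTTY", "enabled": True, "port": 7000})
ft8 = RBN({"name": "RBN FT8", "enabled": True, "port": 7001})

for provider in (cw_rtty, ft8):
    provider.start()  # each start() spawns its own daemon thread and returns
```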
@@ -8,8 +8,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Summits on the Air
class SOTA(HTTPSpotProvider):
    """Spot provider for Summits on the Air"""

    POLL_INTERVAL_SEC = 120
    # SOTA wants us to check for an "epoch" from the API and see if it's actually changed before querying the main data
    # APIs. So it's actually the EPOCH_URL that we pass into the constructor and get the superclass to call on a timer.
@@ -21,13 +22,13 @@ class SOTA(HTTPSpotProvider):

    def __init__(self, provider_config):
        super().__init__(provider_config, self.EPOCH_URL, self.POLL_INTERVAL_SEC)
        self.api_epoch = ""
        self._api_epoch = ""

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        # OK, source data is actually just the epoch at this point. We'll then go on to fetch real data if we know this
        # has changed.
        epoch_changed = http_response.text != self.api_epoch
        self.api_epoch = http_response.text
        epoch_changed = http_response.text != self._api_epoch
        self._api_epoch = http_response.text

        new_spots = []
        # OK, if the epoch actually changed, now we make the real request for data.
@@ -41,14 +42,17 @@ class SOTA(HTTPSpotProvider):
                        dx_call=source_spot["activatorCallsign"].upper(),
                        dx_name=source_spot["activatorName"],
                        de_call=source_spot["callsign"].upper(),
                        freq=(float(source_spot["frequency"]) * 1000000) if (source_spot["frequency"] is not None) else None,  # Seen SOTA spots with no frequency!
                        freq=(float(source_spot["frequency"]) * 1000000) if (
                            source_spot["frequency"] is not None) else None,
                        # Seen SOTA spots with no frequency!
                        mode=source_spot["mode"].upper(),
                        comment=source_spot["comments"],
                        sig="SOTA",
                        sig_refs=[SIGRef(id=source_spot["summitCode"], sig="SOTA", name=source_spot["summitName"], activation_score=source_spot["points"])],
                        sig_refs=[SIGRef(id=source_spot["summitCode"], sig="SOTA", name=source_spot["summitName"],
                                         activation_score=source_spot["points"])],
                        time=datetime.fromisoformat(source_spot["timeStamp"].replace("Z", "+00:00")).timestamp())

            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
            # that for us.
            new_spots.append(spot)
        return new_spots
        return new_spots
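
The epoch mechanism described above means the provider polls a cheap endpoint on the timer and only fetches the full spot list when the value changes. A standalone sketch of that pattern (URLs are placeholders, not the ones the class actually uses):

```python
import requests

EPOCH_URL = "https://example.org/api/spots/epoch"  # placeholder
SPOTS_URL = "https://example.org/api/spots"        # placeholder

last_epoch = ""

def poll():
    """Fetch the full spot list only when the cheap epoch value has changed."""
    global last_epoch
    epoch = requests.get(EPOCH_URL, timeout=10).text
    if epoch == last_epoch:
        return []  # nothing changed since the last poll
    last_epoch = epoch
    return requests.get(SPOTS_URL, timeout=10).json()
```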
@@ -5,59 +5,66 @@ import pytz
from core.config import MAX_SPOT_AGE


# Generic spot provider class. Subclasses of this query the individual APIs for data.
class SpotProvider:
    """Generic spot provider class. Subclasses of this query the individual APIs for data."""

    # Constructor
    def __init__(self, provider_config):
        """Constructor"""

        self.name = provider_config["name"]
        self.enabled = provider_config["enabled"]
        self.last_update_time = datetime.min.replace(tzinfo=pytz.UTC)
        self.last_spot_time = datetime.min.replace(tzinfo=pytz.UTC)
        self.status = "Not Started" if self.enabled else "Disabled"
        self.spots = None
        self.web_server = None
        self._spots = None
        self._web_server = None

    # Set up the provider, e.g. giving it the spot list to work from
    def setup(self, spots, web_server):
        self.spots = spots
        self.web_server = web_server
        """Set up the provider, e.g. giving it the spot list to work from"""

        self._spots = spots
        self._web_server = web_server

    # Start the provider. This should return immediately after spawning threads to access the remote resources
    def start(self):
        """Start the provider. This should return immediately after spawning threads to access the remote resources"""

        raise NotImplementedError("Subclasses must implement this method")

    # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
    # by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have
    # their infer_missing() method called to complete their data set. This is called by the API-querying
    # subclasses on receiving spots.
    def submit_batch(self, spots):
    def _submit_batch(self, spots):
        """Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
        by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have
        their infer_missing() method called to complete their data set. This is called by the API-querying
        subclasses on receiving spots."""

        # Sort the batch so that earliest ones go in first. This helps keep the ordering correct when spots are fired
        # off to SSE listeners.
        spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0))
        spots = sorted(spots, key=lambda s: (s.time if s and s.time else 0))
        for spot in spots:
            if datetime.fromtimestamp(spot.time, pytz.UTC) > self.last_spot_time:
                # Fill in any blanks and add to the list
                spot.infer_missing()
                self.add_spot(spot)
        self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC)
                self._add_spot(spot)
        if spots:
            self.last_spot_time = datetime.fromtimestamp(max(map(lambda s: s.time, spots)), pytz.UTC)

    def _submit(self, spot):
        """Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
        passing the check will also have their infer_missing() method called to complete their data set. This is called by
        the data streaming subclasses, which can be relied upon not to re-provide old spots."""

    # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
    # passing the check will also have their infer_missing() method called to complete their data set. This is called by
    # the data streaming subclasses, which can be relied upon not to re-provide old spots.
    def submit(self, spot):
        # Fill in any blanks and add to the list
        spot.infer_missing()
        self.add_spot(spot)
        self._add_spot(spot)
        self.last_spot_time = datetime.fromtimestamp(spot.time, pytz.UTC)

    def add_spot(self, spot):
    def _add_spot(self, spot):
        if not spot.expired():
            self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
            # Ping the web server in case we have any SSE connections that need to see this immediately
            if self.web_server:
                self.web_server.notify_new_spot(spot)
            self._spots.add(spot.id, spot, expire=MAX_SPOT_AGE)
            # Ping the web server in case we have any SSE connections that need to see this immediately
            if self._web_server:
                self._web_server.notify_new_spot(spot)

    # Stop any threads and prepare for application shutdown
    def stop(self):
        raise NotImplementedError("Subclasses must implement this method")
        """Stop any threads and prepare for application shutdown"""

        raise NotImplementedError("Subclasses must implement this method")
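
The `_submit_batch` docstring above describes the dedup rule: sort ascending by time, keep only spots newer than the newest spot seen on the previous poll, then advance the cutoff. The same logic as a standalone sketch (assuming spot objects carrying a `time` Unix timestamp):

```python
from datetime import datetime, timezone

def filter_new_spots(spots, last_spot_time):
    """Return the spots newer than last_spot_time, plus the new cutoff."""
    spots = sorted(spots, key=lambda s: s.time if s and s.time else 0)
    fresh = [s for s in spots
             if datetime.fromtimestamp(s.time, timezone.utc) > last_spot_time]
    # Guarding against an empty batch mirrors the "if spots:" fix above
    cutoff = max((datetime.fromtimestamp(s.time, timezone.utc) for s in spots),
                 default=last_spot_time)
    return fresh, cutoff
```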
@@ -10,30 +10,30 @@ from core.constants import HTTP_HEADERS
from spotproviders.spot_provider import SpotProvider


# Spot provider using Server-Sent Events.
class SSESpotProvider(SpotProvider):
    """Spot provider using Server-Sent Events."""

    def __init__(self, provider_config, url):
        super().__init__(provider_config)
        self.url = url
        self.event_source = None
        self.thread = None
        self.stopped = False
        self.last_event_id = None
        self._url = url
        self._event_source = None
        self._thread = None
        self._stopped = False
        self._last_event_id = None

    def start(self):
        logging.info("Set up SSE connection to " + self.name + " spot API.")
        self.stopped = False
        self.thread = Thread(target=self.run)
        self.thread.daemon = True
        self.thread.start()
        self._stopped = False
        self._thread = Thread(target=self._run)
        self._thread.daemon = True
        self._thread.start()

    def stop(self):
        self.stopped = True
        if self.event_source:
            self.event_source.close()
        if self.thread:
            self.thread.join()
        self._stopped = True
        if self._event_source:
            self._event_source.close()
        if self._thread:
            self._thread.join()

    def _on_open(self):
        self.status = "Waiting for Data"
@@ -41,37 +41,39 @@ class SSESpotProvider(SpotProvider):
    def _on_error(self):
        self.status = "Connecting"

    def run(self):
        while not self.stopped:
    def _run(self):
        while not self._stopped:
            try:
                logging.debug("Connecting to " + self.name + " spot API...")
                self.status = "Connecting"
                with EventSource(self.url, headers=HTTP_HEADERS, latest_event_id=self.last_event_id, timeout=30,
                with EventSource(self._url, headers=HTTP_HEADERS, latest_event_id=self._last_event_id, timeout=30,
                                 on_open=self._on_open, on_error=self._on_error) as event_source:
                    self.event_source = event_source
                    for event in self.event_source:
                    self._event_source = event_source
                    for event in self._event_source:
                        if event.type == 'message':
                            try:
                                self.last_event_id = event.last_event_id
                                new_spot = self.sse_message_to_spot(event.data)
                                self._last_event_id = event.last_event_id
                                new_spot = self._sse_message_to_spot(event.data)
                                if new_spot:
                                    self.submit(new_spot)
                                    self._submit(new_spot)

                                self.status = "OK"
                                self.last_update_time = datetime.now(pytz.UTC)
                                logging.debug("Received data from " + self.name + " spot API.")

                            except Exception as e:
                                logging.exception("Exception processing message from SSE Spot Provider (" + self.name + ")")
                            except Exception:
                                logging.exception(
                                    "Exception processing message from SSE Spot Provider (" + self.name + ")")

            except Exception as e:
            except Exception:
                self.status = "Error"
                logging.exception("Exception in SSE Spot Provider (" + self.name + ")")
            else:
                self.status = "Disconnected"
            sleep(5)  # Wait before trying to reconnect

    # Convert an SSE message received from the API into a spot. The whole message data is provided here so the subclass
    # implementations can handle the message as JSON, XML, text, whatever the API actually provides.
    def sse_message_to_spot(self, message_data):
        raise NotImplementedError("Subclasses must implement this method")
    def _sse_message_to_spot(self, message_data):
        """Convert an SSE message received from the API into a spot. The whole message data is provided here so the subclass
        implementations can handle the message as JSON, XML, text, whatever the API actually provides."""

        raise NotImplementedError("Subclasses must implement this method")
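
As the docstring above says, subclasses only have to turn one SSE message payload into a spot; the base class owns the connection, reconnection, and submission. A hypothetical JSON-based subclass (the API shape and URL are invented for illustration):

```python
import json

from data.spot import Spot
from spotproviders.sse_spot_provider import SSESpotProvider


class ExampleSSE(SSESpotProvider):
    """Hypothetical provider for an API that streams {"call": ..., "freq_hz": ...} as JSON."""

    def __init__(self, provider_config):
        super().__init__(provider_config, "https://example.org/spots/sse")  # placeholder URL

    def _sse_message_to_spot(self, message_data):
        data = json.loads(message_data)  # this imaginary API sends JSON
        return Spot(source=self.name,
                    dx_call=data["call"].upper(),
                    freq=data["freq_hz"])
```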
@@ -7,15 +7,16 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for UK Packet Radio network API
class UKPacketNet(HTTPSpotProvider):
    """Spot provider for UK Packet Radio network API"""

    POLL_INTERVAL_SEC = 600
    SPOTS_URL = "https://nodes.ukpacketradio.network/api/nodedata"

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        new_spots = []
        # Iterate through source data
        nodes = http_response.json()["nodes"]
@@ -35,20 +36,26 @@ class UKPacketNet(HTTPSpotProvider):
                # First build a "full" comment combining some of the extra info
                comment = listed_port["comment"] if "comment" in listed_port else ""
                comment = (comment + " " + listed_port["mode"]) if "mode" in listed_port else comment
                comment = (comment + " " + listed_port["modulation"]) if "modulation" in listed_port else comment
                comment = (comment + " " + str(listed_port["baud"]) + " baud") if "baud" in listed_port and listed_port["baud"] > 0 else comment
                comment = (comment + " " + listed_port[
                    "modulation"]) if "modulation" in listed_port else comment
                comment = (comment + " " + str(
                    listed_port["baud"]) + " baud") if "baud" in listed_port and listed_port[
                    "baud"] > 0 else comment

                # Get frequency from the comment if it's not set properly in the data structure. This is
                # very hacky but a lot of node comments contain their frequency as the first or second
                # word of their comment, but not in the proper data structure field.
                freq = listed_port["freq"] if "freq" in listed_port and listed_port["freq"] > 0 else None
                freq = listed_port["freq"] if "freq" in listed_port and listed_port[
                    "freq"] > 0 else None
                if not freq and comment:
                    possible_freq = comment.split(" ")[0].upper().replace("MHZ", "")
                    if re.match(r"^[0-9.]+$", possible_freq) and possible_freq != "1200" and possible_freq != "9600":
                    if re.match(r"^[0-9.]+$",
                                possible_freq) and possible_freq != "1200" and possible_freq != "9600":
                        freq = float(possible_freq) * 1000000
                if not freq and len(comment.split(" ")) > 1:
                    possible_freq = comment.split(" ")[1].upper().replace("MHZ", "")
                    if re.match(r"^[0-9.]+$", possible_freq) and possible_freq != "1200" and possible_freq != "9600":
                    if re.match(r"^[0-9.]+$",
                                possible_freq) and possible_freq != "1200" and possible_freq != "9600":
                        freq = float(possible_freq) * 1000000
                # Check for a found frequency likely having been in kHz, sorry to all GHz packet folks
                if freq and freq > 1000000000:
@@ -61,8 +68,10 @@ class UKPacketNet(HTTPSpotProvider):
                        freq=freq,
                        mode="PKT",
                        comment=comment,
                        time=datetime.strptime(heard["lastHeard"], "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC).timestamp(),
                        de_grid=node["location"]["locator"] if "locator" in node["location"] else None,
                        time=datetime.strptime(heard["lastHeard"], "%Y-%m-%d %H:%M:%S").replace(
                            tzinfo=pytz.UTC).timestamp(),
                        de_grid=node["location"]["locator"] if "locator" in node[
                            "location"] else None,
                        de_latitude=node["location"]["coords"]["lat"],
                        de_longitude=node["location"]["coords"]["lon"])

@@ -77,7 +86,8 @@ class UKPacketNet(HTTPSpotProvider):
        # data, and we can use that to look these up.
        for spot in new_spots:
            if spot.dx_call in nodes:
                spot.dx_grid = nodes[spot.dx_call]["location"]["locator"] if "locator" in nodes[spot.dx_call]["location"] else None
                spot.dx_grid = nodes[spot.dx_call]["location"]["locator"] if "locator" in nodes[spot.dx_call][
                    "location"] else None
                spot.dx_latitude = nodes[spot.dx_call]["location"]["coords"]["lat"]
                spot.dx_longitude = nodes[spot.dx_call]["location"]["coords"]["lon"]
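
The "hacky" comment parsing described above amounts to: try the first two words of the node comment as a frequency in MHz, skip values that are really baud rates, and knock likely-kHz values down by a factor of 1000. The same heuristic as a standalone sketch (the kHz correction is assumed from the truncated hunk above):

```python
import re

def freq_from_comment(comment):
    """Best-effort frequency (Hz) from a packet node comment, or None."""
    for word in comment.split(" ")[:2]:
        candidate = word.upper().replace("MHZ", "")
        # 1200 and 9600 are almost certainly baud rates, not frequencies
        if re.match(r"^[0-9.]+$", candidate) and candidate not in ("1200", "9600"):
            freq = float(candidate) * 1000000
            # Anything over 1 GHz was probably quoted in kHz, not MHz
            return freq / 1000 if freq > 1000000000 else freq
    return None
```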
@@ -10,30 +10,30 @@ from core.constants import HTTP_HEADERS
from spotproviders.spot_provider import SpotProvider


# Spot provider using websockets.
class WebsocketSpotProvider(SpotProvider):
    """Spot provider using websockets."""

    def __init__(self, provider_config, url):
        super().__init__(provider_config)
        self.url = url
        self.ws = None
        self.thread = None
        self.stopped = False
        self.last_event_id = None
        self._url = url
        self._ws = None
        self._thread = None
        self._stopped = False
        self._last_event_id = None

    def start(self):
        logging.info("Set up websocket connection to " + self.name + " spot API.")
        self.stopped = False
        self.thread = Thread(target=self.run)
        self.thread.daemon = True
        self.thread.start()
        self._stopped = False
        self._thread = Thread(target=self._run)
        self._thread.daemon = True
        self._thread.start()

    def stop(self):
        self.stopped = True
        if self.ws:
            self.ws.close()
        if self.thread:
            self.thread.join()
        self._stopped = True
        if self._ws:
            self._ws.close()
        if self._thread:
            self._thread.join()

    def _on_open(self):
        self.status = "Waiting for Data"
@@ -41,26 +41,27 @@ class WebsocketSpotProvider(SpotProvider):
    def _on_error(self):
        self.status = "Connecting"

    def run(self):
        while not self.stopped:
    def _run(self):
        while not self._stopped:
            try:
                logging.debug("Connecting to " + self.name + " spot API...")
                self.status = "Connecting"
                self.ws = create_connection(self.url, header=HTTP_HEADERS)
                self._ws = create_connection(self._url, header=HTTP_HEADERS)
                self.status = "Connected"
                data = self.ws.recv()
                data = self._ws.recv()
                if data:
                    try:
                        new_spot = self.ws_message_to_spot(data)
                        new_spot = self._ws_message_to_spot(data)
                        if new_spot:
                            self.submit(new_spot)
                            self._submit(new_spot)

                        self.status = "OK"
                        self.last_update_time = datetime.now(pytz.UTC)
                        logging.debug("Received data from " + self.name + " spot API.")

                    except Exception as e:
                        logging.exception("Exception processing message from Websocket Spot Provider (" + self.name + ")")
                    except Exception:
                        logging.exception(
                            "Exception processing message from Websocket Spot Provider (" + self.name + ")")

            except Exception as e:
                self.status = "Error"
@@ -69,7 +70,8 @@ class WebsocketSpotProvider(SpotProvider):
                self.status = "Disconnected"
            sleep(5)  # Wait before trying to reconnect

    # Convert a WS message received from the API into a spot. The exact message data (in bytes) is provided here so the
    # subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides.
    def ws_message_to_spot(self, bytes):
        raise NotImplementedError("Subclasses must implement this method")
    def _ws_message_to_spot(self, b):
        """Convert a WS message received from the API into a spot. The exact message data (in bytes) is provided here so the
        subclass implementations can handle the message as string, JSON, XML, whatever the API actually provides."""

        raise NotImplementedError("Subclasses must implement this method")
@@ -10,8 +10,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Wainwrights on the Air
class WOTA(HTTPSpotProvider):
    """Spot provider for Wainwrights on the Air"""

    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://www.wota.org.uk/spots_rss.php"
    LIST_URL = "https://www.wota.org.uk/mapping/data/summits.json"
@@ -20,7 +21,7 @@ class WOTA(HTTPSpotProvider):
    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        new_spots = []
        rss = RSSParser.parse(http_response.content.decode())
        # Iterate through source data
@@ -47,6 +48,7 @@ class WOTA(HTTPSpotProvider):
            freq_mode = desc_split[0].replace("Frequencies/modes:", "").strip()
            freq_mode_split = re.split(r'[\-\s]+', freq_mode)
            freq_hz = float(freq_mode_split[0]) * 1000000
            mode = None
            if len(freq_mode_split) > 1:
                mode = freq_mode_split[1].upper()
@@ -6,14 +6,15 @@ from data.spot import Spot
from spotproviders.sse_spot_provider import SSESpotProvider


# Spot provider for Worldwide Bunkers on the Air
class WWBOTA(SSESpotProvider):
    """Spot provider for Worldwide Bunkers on the Air"""

    SPOTS_URL = "https://api.wwbota.net/spots/"

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL)

    def sse_message_to_spot(self, message):
    def _sse_message_to_spot(self, message):
        source_spot = json.loads(message)
        # Convert to our spot format. First we unpack references, because WWBOTA spots can have more than one for
        # n-fer activations.
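
The comment above refers to "n-fer" activations, where one spot covers several bunker references at once, so a single spot has to become a list of SIGRefs. A hedged sketch of that unpacking (the message field name is assumed, since the hunk is truncated here):

```python
from data.sig_ref import SIGRef

def unpack_refs(source_spot):
    """Build one SIGRef per bunker reference on the spot (field name assumed)."""
    return [SIGRef(id=ref, sig="WWBOTA") for ref in source_spot.get("references", [])]
```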
@@ -7,15 +7,16 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Worldwide Flora & Fauna
class WWFF(HTTPSpotProvider):
    """Spot provider for Worldwide Flora & Fauna"""

    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://spots.wwff.co/static/spots.json"

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        new_spots = []
        # Iterate through source data
        for source_spot in http_response.json():
@@ -36,4 +37,4 @@ class WWFF(HTTPSpotProvider):
            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
            # that for us.
            new_spots.append(spot)
        return new_spots
        return new_spots
@@ -1,22 +1,22 @@
from datetime import datetime

import json
import pytz

from data.sig_ref import SIGRef
from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Towers on the Air
class WWTOTA(HTTPSpotProvider):
    """Spot provider for Towers on the Air"""

    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://wwtota.com/api/cluster_live.php"

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        new_spots = []
        response_fixed = http_response.text.replace("\\/", "/")
        response_json = json.loads(response_fixed)
@@ -33,9 +33,10 @@ class WWTOTA(HTTPSpotProvider):
                comment=source_spot["comment"],
                sig="WWTOTA",
                sig_refs=[SIGRef(id=source_spot["ref"], sig="WWTOTA")],
                time=datetime.strptime(response_json["updated"][:10] + source_spot["time"], "%Y-%m-%d%H:%M").timestamp())
                time=datetime.strptime(response_json["updated"][:10] + source_spot["time"],
                                       "%Y-%m-%d%H:%M").timestamp())

            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
            # that for us.
            new_spots.append(spot)
        return new_spots
        return new_spots
@@ -10,12 +10,13 @@ from data.spot import Spot
from spotproviders.websocket_spot_provider import WebsocketSpotProvider


# Spot provider for servers based on the "xOTA" software at https://github.com/nischu/xOTA/
# The provider typically doesn't give us a lat/lon or SIG explicitly, so our own config provides a SIG and a reference
# to a local CSV file with location information. This functionality is implemented for TOTA events, of which there are
# several - so a plain lookup of a "TOTA reference" doesn't make sense, it depends on which TOTA and hence which server
# supplied the data, which is why the CSV location lookup is here and not in sig_utils.
class XOTA(WebsocketSpotProvider):
    """Spot provider for servers based on the "xOTA" software at https://github.com/nischu/xOTA/
    The provider typically doesn't give us a lat/lon or SIG explicitly, so our own config provides a SIG and a reference
    to a local CSV file with location information. This functionality is implemented for TOTA events, of which there are
    several - so a plain lookup of a "TOTA reference" doesn't make sense, it depends on which TOTA and hence which server
    supplied the data, which is why the CSV location lookup is here and not in sig_utils."""

    LOCATION_DATA = {}
    SIG = None

@@ -35,8 +36,8 @@ class XOTA(WebsocketSpotProvider):
        except:
            logging.exception("Could not look up location data for XOTA source.")

    def ws_message_to_spot(self, bytes):
        string = bytes.decode("utf-8")
    def _ws_message_to_spot(self, b):
        string = b.decode("utf-8")
        source_spot = json.loads(string)
        ref_id = source_spot["reference"]["title"]
        lat = float(self.LOCATION_DATA[ref_id]["lat"]) if ref_id in self.LOCATION_DATA else None
@@ -47,7 +48,8 @@ class XOTA(WebsocketSpotProvider):
            freq=float(source_spot["freq"]) * 1000,
            mode=source_spot["mode"].upper(),
            sig=self.SIG,
            sig_refs=[SIGRef(id=ref_id, sig=self.SIG, url=source_spot["reference"]["website"], latitude=lat, longitude=lon)],
            sig_refs=[SIGRef(id=ref_id, sig=self.SIG, url=source_spot["reference"]["website"], latitude=lat,
                             longitude=lon)],
            time=datetime.now(pytz.UTC).timestamp(),
            dx_latitude=lat,
            dx_longitude=lon,
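
The docstring above explains why the reference-to-location lookup is a per-server CSV rather than a shared utility: each xOTA deployment has its own reference space. A sketch of loading such a file into `LOCATION_DATA` (the column names are assumed):

```python
import csv

def load_location_data(csv_path):
    """Map reference title -> {"lat": ..., "lon": ...} from a local CSV file."""
    location_data = {}
    with open(csv_path, newline="") as f:
        for row in csv.DictReader(f):  # assumed columns: ref, lat, lon
            location_data[row["ref"]] = {"lat": row["lat"], "lon": row["lon"]}
    return location_data
```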
@@ -7,8 +7,9 @@ from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for ZLOTA
class ZLOTA(HTTPSpotProvider):
    """Spot provider for ZLOTA"""

    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://ontheair.nz/api/spots?zlota_only=true"
    LIST_URL = "https://ontheair.nz/assets/assets.json"
@@ -16,7 +17,7 @@ class ZLOTA(HTTPSpotProvider):
    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
    def _http_response_to_spots(self, http_response):
        new_spots = []
        # Iterate through source data
        for source_spot in http_response.json():
@@ -35,7 +36,8 @@ class ZLOTA(HTTPSpotProvider):
                comment=source_spot["comments"],
                sig="ZLOTA",
                sig_refs=[SIGRef(id=source_spot["reference"], sig="ZLOTA", name=source_spot["name"])],
                time=datetime.fromisoformat(source_spot["referenced_time"].replace("Z", "+00:00")).astimezone(pytz.UTC).timestamp())
                time=datetime.fromisoformat(source_spot["referenced_time"].replace("Z", "+00:00")).astimezone(
                    pytz.UTC).timestamp())

            new_spots.append(spot)
        return new_spots
@@ -66,7 +66,7 @@
<p>This software is dedicated to the memory of Tom G1PJB, SK, a friend and colleague who sadly passed away around the time I started writing it in Autumn 2025. I was looking forward to showing it to you when it was done.</p>
</div>

<script src="/js/common.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-about").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -69,8 +69,8 @@

</div>

<script src="/js/common.js?v=1772180923"></script>
<script src="/js/add-spot.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/add-spot.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-add-spot").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -56,8 +56,8 @@

</div>

<script src="/js/common.js?v=1772180923"></script>
<script src="/js/alerts.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/alerts.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-alerts").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -62,9 +62,9 @@
<script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script>
<script src="/js/common.js?v=1772180923"></script>
<script src="/js/spotsbandsandmap.js?v=1772180923"></script>
<script src="/js/bands.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/spotsbandsandmap.js?v=1773090023"></script>
<script src="/js/bands.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-bands").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -13,10 +13,10 @@
<meta property="twitter:title" content="Spothole"/>
<meta name="description" content="An Amateur Radio spotting tool bringing together DX clusters and outdoor programmes, providing a universal JSON API and web interface."/>
<meta property="og:description" content="An Amateur Radio spotting tool bringing together DX clusters and outdoor programmes, providing a universal JSON API and web interface."/>
<link rel="canonical" href="https://spothole.app/"/>
<meta property="og:url" content="https://spothole.app/"/>
<meta property="og:image" content="https://spothole.app/img/banner.png"/>
<meta property="twitter:image" content="https://spothole.app/img/banner.png"/>
<link rel="canonical" href="{{ baseurl }}{{ current_path }}"/>
<meta property="og:url" content="{{ baseurl }}{{ current_path }}"/>
<meta property="og:image" content="{{ baseurl }}/img/banner.png"/>
<meta property="twitter:image" content="{{ baseurl }}/img/banner.png"/>
<meta name="twitter:card" content="summary_large_image"/>
<meta name="author" content="Ian Renton"/>
<meta property="og:locale" content="en_GB"/>
@@ -46,10 +46,10 @@
        crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/tinycolor2@1.6.0/cjs/tinycolor.min.js"></script>

<script src="https://misc.ianrenton.com/jsutils/utils.js?v=1772180923"></script>
<script src="https://misc.ianrenton.com/jsutils/storage.js?v=1772180923"></script>
<script src="https://misc.ianrenton.com/jsutils/ui-ham.js?v=1772180923"></script>
<script src="https://misc.ianrenton.com/jsutils/geo.js?v=1772180923"></script>
<script src="https://misc.ianrenton.com/jsutils/utils.js?v=1773090023"></script>
<script src="https://misc.ianrenton.com/jsutils/storage.js?v=1773090023"></script>
<script src="https://misc.ianrenton.com/jsutils/ui-ham.js?v=1773090023"></script>
<script src="https://misc.ianrenton.com/jsutils/geo.js?v=1773090023"></script>

</head>
<body>

@@ -70,9 +70,9 @@
<script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script>
<script src="/js/common.js?v=1772180923"></script>
<script src="/js/spotsbandsandmap.js?v=1772180923"></script>
<script src="/js/map.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/spotsbandsandmap.js?v=1773090023"></script>
<script src="/js/map.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-map").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -87,9 +87,9 @@
<script>
let spotProvidersEnabledByDefault = {% raw json_encode(web_ui_options["spot-providers-enabled-by-default"]) %};
</script>
<script src="/js/common.js?v=1772180923"></script>
<script src="/js/spotsbandsandmap.js?v=1772180923"></script>
<script src="/js/spots.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/spotsbandsandmap.js?v=1773090023"></script>
<script src="/js/spots.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-spots").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -3,8 +3,8 @@

<div id="status-container" class="row row-cols-1 row-cols-md-4 g-4 mt-4"></div>

<script src="/js/common.js?v=1772180923"></script>
<script src="/js/status.js?v=1772180923"></script>
<script src="/js/common.js?v=1773090023"></script>
<script src="/js/status.js?v=1773090023"></script>
<script>$(document).ready(function() { $("#nav-link-status").addClass("active"); }); <!-- highlight active page in nav --></script>

{% end %}
@@ -13,13 +13,14 @@ cache = CachedSession("/tmp/cache", expire_after=timedelta(days=30))
data = cache.get("https://raw.githubusercontent.com/k0swe/dxcc-json/refs/heads/main/dxcc.json").json()

for dxcc in data["dxcc"]:
    id = dxcc["entityCode"]
    dxcc_id = dxcc["entityCode"]
    flag = dxcc["flag"]
    image = Image.new("RGBA", (140, 110), (255, 0, 0, 0))
    draw = ImageDraw.Draw(image)
    draw.text((0, -10), flag, font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109), embedded_color=True)
    outfile = str(id) + ".png"
    draw.text((0, -10), flag, font=ImageFont.truetype("/usr/share/fonts/truetype/noto/NotoColorEmoji.ttf", 109),
              embedded_color=True)
    outfile = str(dxcc_id) + ".png"
    image.save(outfile, "PNG")

image = Image.new("RGBA", (140, 110), (255, 0, 0, 0))
image.save("999.png", "PNG")
image.save("999.png", "PNG")