Mirror of https://git.ianrenton.com/ian/spothole.git, synced 2025-10-27 16:59:25 +00:00
Add data providers for most other programmes
providers/dxcluster.py

@@ -1,19 +1,23 @@
+import logging
+import re
 from datetime import datetime, timezone
 from threading import Thread
 from time import sleep
 
 import pytz
+import telnetlib3
 
 from core.constants import SERVER_OWNER_CALLSIGN
 from data.spot import Spot
 from providers.provider import Provider
-import telnetlib3
-import re
-
-callsign_pattern = "([a-z|0-9|/]+)"
-frequency_pattern = "([0-9|.]+)"
-pattern = re.compile("^DX de "+callsign_pattern+":\\s+"+frequency_pattern+"\\s+"+callsign_pattern+"\\s+(.*)\\s+(\\d{4}Z)", re.IGNORECASE)
 
 
+# Provider for a DX Cluster. Hostname and port provided as parameters.
 class DXCluster(Provider):
+    CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
+    FREQUENCY_PATTERN = "([0-9|.]+)"
+    LINE_PATTERN = re.compile(
+        "^DX de " + CALLSIGN_PATTERN + ":\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
+        re.IGNORECASE)
 
     # Constructor requires hostname and port
     def __init__(self, hostname, port):
@@ -25,7 +29,7 @@ class DXCluster(Provider):
         self.run = True
 
     def name(self):
-        return "DX Cluster " + self.hostname + " " + str(self.port)
+        return "DX Cluster " + self.hostname
 
     def start(self):
         self.thread = Thread(target=self.handle)
@@ -37,29 +41,50 @@ class DXCluster(Provider):
         self.thread.join()
 
     def handle(self):
-        self.status = "Connecting"
-        self.telnet = telnetlib3.Telnet(self.hostname, self.port)
-        self.telnet.read_until("login: ".encode("ascii"))
-        self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("ascii"))
-        self.status = "Waiting for Data"
-
-        while self.run:
-            # Check new telnet info against regular expression
-            telnet_output = self.telnet.read_until("\n".encode("ascii"))
-            match = pattern.match(telnet_output.decode("ascii"))
-            if match:
-                spot_time = datetime.strptime(match.group(5), "%H%MZ")
-                spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
-                spot = Spot(source=self.name(),
-                            dx_call=match.group(3),
-                            de_call=match.group(1),
-                            freq=float(match.group(2)),
-                            comment=match.group(4).strip(),
-                            time=spot_datetime)
-                # Fill in any blanks
-                spot.infer_missing()
-                # Add to our list
-                self.submit([spot])
+        connected = False
+        while not connected and self.run:
+            try:
+                self.status = "Connecting"
+                self.telnet = telnetlib3.Telnet(self.hostname, self.port)
+                self.telnet.read_until("login: ".encode("ascii"))
+                self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("ascii"))
+                connected = True
+            except Exception as e:
+                self.status = "Error"
+                logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").")
+                sleep(5)
+
+        self.status = "OK"
+        self.last_update_time = datetime.now(timezone.utc)
+        self.status = "Waiting for Data"
+        while connected and self.run:
+            try:
+                # Check new telnet info against regular expression
+                telnet_output = self.telnet.read_until("\n".encode("ascii"))
+                match = self.LINE_PATTERN.match(telnet_output.decode("ascii"))
+                if match:
+                    spot_time = datetime.strptime(match.group(5), "%H%MZ")
+                    spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
+                    spot = Spot(source=self.name(),
+                                dx_call=match.group(3),
+                                de_call=match.group(1),
+                                freq=float(match.group(2)),
+                                comment=match.group(4).strip(),
+                                time=spot_datetime)
+                    # Fill in any blanks
+                    spot.infer_missing()
+                    # Add to our list
+                    self.submit([spot])
+
+                    self.status = "OK"
+                    self.last_update_time = datetime.now(timezone.utc)
+
+            except Exception as e:
+                connected = False
+                if self.run:
+                    self.status = "Error"
+                    logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")")
+                    sleep(5)
+                else:
+                    self.status = "Shutting down"
+
+        self.status = "Disconnected"
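As a quick illustration of what the LINE_PATTERN regex pulls out of a cluster line, here is a minimal sketch; the sample line and callsigns are invented:

import re

CALLSIGN_PATTERN = "([a-z|0-9|/]+)"
FREQUENCY_PATTERN = "([0-9|.]+)"
LINE_PATTERN = re.compile(
    "^DX de " + CALLSIGN_PATTERN + ":\\s+" + FREQUENCY_PATTERN + "\\s+" + CALLSIGN_PATTERN + "\\s+(.*)\\s+(\\d{4}Z)",
    re.IGNORECASE)

match = LINE_PATTERN.match("DX de G4ABC:     14074.0  K1XYZ        FT8 loud      1528Z")
print(match.group(1))          # de (spotter) callsign: "G4ABC"
print(float(match.group(2)))   # frequency in kHz: 14074.0
print(match.group(3))          # dx (activator) callsign: "K1XYZ"
print(match.group(4).strip())  # comment, stripped as in handle(): "FT8 loud"
print(match.group(5))          # spot time: "1528Z"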
providers/gma.py (new file, 51 lines)

@@ -0,0 +1,51 @@
+from datetime import datetime, timedelta
+
+import pytz
+from requests_cache import CachedSession
+
+from data.spot import Spot
+from providers.http_provider import HTTPProvider
+
+
+# Provider for General Mountain Activity
+class GMA(HTTPProvider):
+    POLL_INTERVAL_SEC = 120
+    SPOTS_URL = "https://www.cqgma.org/api/spots/25/"
+    # GMA spots don't contain the details of the programme they are for; we need a separate lookup for that
+    REF_INFO_URL_ROOT = "https://www.cqgma.org/api/ref/?"
+    REF_INFO_CACHE_TIME_DAYS = 30
+    REF_INFO_CACHE = CachedSession("gma_ref_info_cache", expire_after=timedelta(days=REF_INFO_CACHE_TIME_DAYS))
+
+    def __init__(self):
+        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)
+
+    def name(self):
+        return "GMA"
+
+    def http_response_to_spots(self, http_response):
+        new_spots = []
+        # Iterate through source data
+        for source_spot in http_response.json()["RCD"]:
+            # Convert to our spot format
+            spot = Spot(source=self.name(),
+                        dx_call=source_spot["ACTIVATOR"].upper(),
+                        de_call=source_spot["SPOTTER"].upper(),
+                        freq=float(source_spot["QRG"]),
+                        mode=source_spot["MODE"].upper(),
+                        comment=source_spot["TEXT"],
+                        sig_refs=[source_spot["REF"]],
+                        sig_refs_names=[source_spot["NAME"]],
+                        time=datetime.strptime(source_spot["DATE"] + source_spot["TIME"], "%Y%m%d%H%M").replace(tzinfo=pytz.UTC),
+                        latitude=float(source_spot["LAT"]),
+                        longitude=float(source_spot["LON"]))
+
+            # GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
+            ref_info = self.REF_INFO_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"], headers=self.HTTP_HEADERS).json()
+            spot.sig = ref_info["reftype"]
+
+            # Fill in any missing data
+            spot.infer_missing()
+            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+            # that for us.
+            new_spots.append(spot)
+        return new_spots
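The per-reference lookup above would otherwise hit the GMA API once per spot on every poll; requests-cache's CachedSession makes repeat lookups free for the length of the expiry window. A minimal sketch of that behaviour (the reference code in the URL is invented):

from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession("gma_ref_info_cache", expire_after=timedelta(days=30))
first = session.get("https://www.cqgma.org/api/ref/?DL/AM-001")   # fetched over the network and stored
second = session.get("https://www.cqgma.org/api/ref/?DL/AM-001")  # served from the local cache
print(first.from_cache, second.from_cache)  # requests-cache adds from_cache: False, then True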
providers/http_provider.py (new file, 59 lines)

@@ -0,0 +1,59 @@
+import logging
+from datetime import datetime, timezone
+from threading import Timer, Thread
+from time import sleep
+
+import requests
+
+from providers.provider import Provider
+
+
+# Generic data provider class for providers that request data via HTTP(S). Just for convenience, to avoid code
+# duplication. Subclasses of this query the individual APIs for data.
+class HTTPProvider(Provider):
+
+    def __init__(self, url, poll_interval):
+        super().__init__()
+        self.url = url
+        self.poll_interval = poll_interval
+        self.poll_timer = None
+
+    def name(self):
+        raise NotImplementedError("Subclasses must implement this method")
+
+    def start(self):
+        # Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and
+        # the application can continue starting. The thread itself will then die, and the timer will kick in on its own
+        # thread.
+        thread = Thread(target=self.poll)
+        thread.start()
+
+    def stop(self):
+        self.poll_timer.cancel()
+
+    def poll(self):
+        try:
+            # Request data from API
+            http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
+            # Pass off to the subclass for processing
+            new_spots = self.http_response_to_spots(http_response)
+            # Submit the new spots for processing. There might not be any spots for the less popular programmes.
+            if new_spots:
+                self.submit(new_spots)
+
+            self.status = "OK"
+            self.last_update_time = datetime.now(timezone.utc)
+
+        except Exception as e:
+            self.status = "Error"
+            logging.exception("Exception in HTTP JSON Provider (" + self.name() + ")")
+            sleep(1)
+
+        self.poll_timer = Timer(self.poll_interval, self.poll)
+        self.poll_timer.start()
+
+    # Convert an HTTP response returned by the API into spot data. The whole response is provided here so the subclass
+    # implementations can check for HTTP status codes if necessary, and handle the response as JSON, XML, text, whatever
+    # the API actually provides.
+    def http_response_to_spots(self, http_response):
+        raise NotImplementedError("Subclasses must implement this method")
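The contract for subclasses is small: pass a URL and poll interval to the constructor, then implement name() and http_response_to_spots(). A minimal sketch of a hypothetical provider; the endpoint and JSON field names here are invented, not a real API:

from data.spot import Spot
from providers.http_provider import HTTPProvider


class ExampleProvider(HTTPProvider):
    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://example.com/api/spots"  # hypothetical endpoint

    def __init__(self):
        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def name(self):
        return "Example"

    def http_response_to_spots(self, http_response):
        # The whole response object arrives here, so a subclass could inspect status codes
        # before parsing. Field names below are invented for the sake of the example.
        new_spots = []
        for source_spot in http_response.json():
            spot = Spot(source=self.name(),
                        dx_call=source_spot["activator"].upper(),
                        de_call=source_spot["spotter"].upper(),
                        freq=float(source_spot["freq_khz"]))
            spot.infer_missing()
            new_spots.append(spot)
        return new_spots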
providers/pota.py

@@ -1,65 +1,44 @@
-from datetime import datetime, timezone
+from datetime import datetime
+
 import pytz
+
 from data.spot import Spot
-from providers.provider import Provider
-from threading import Timer
-import requests
-from datetime import datetime
+from providers.http_provider import HTTPProvider
 
 
-class POTA(Provider):
+# Provider for Parks on the Air
+class POTA(HTTPProvider):
     POLL_INTERVAL_SEC = 120
     SPOTS_URL = "https://api.pota.app/spot/activator"
 
     def __init__(self):
-        super().__init__()
-        self.poll_timer = None
+        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)
 
     def name(self):
         return "POTA"
 
-    def start(self):
-        self.poll()
-
-    def stop(self):
-        self.poll_timer.cancel()
-
-    def poll(self):
-        try:
-            # Request data from API
-            source_data = requests.get(self.SPOTS_URL, headers=self.HTTP_HEADERS).json()
-            # Build a list of spots we haven't seen before
-            new_spots = []
-            # Iterate through source data
-            for source_spot in source_data:
-                # Convert to our spot format
-                spot = Spot(source=self.name(),
-                            source_id=source_spot["spotId"],
-                            dx_call=source_spot["activator"],
-                            de_call=source_spot["spotter"],
-                            freq=float(source_spot["frequency"]),
-                            mode=source_spot["mode"],
-                            comment=source_spot["comments"],
-                            sig="POTA",
-                            sig_refs=[source_spot["reference"]],
-                            sig_refs_names=[source_spot["name"]],
-                            time=datetime.strptime(source_spot["spotTime"], "%Y-%m-%dT%H:%M:%S").replace(tzinfo=pytz.UTC),
-                            grid=source_spot["grid6"],
-                            latitude=source_spot["latitude"],
-                            longitude=source_spot["longitude"],
-                            qrt="QRT" in source_spot["comments"].upper())
-                # Fill in any blanks
-                spot.infer_missing()
-                # Add to our list
-                new_spots.append(spot)
-
-            # Submit the new spots for processing
-            self.submit(new_spots)
-
-            self.status = "OK"
-            self.last_update_time = datetime.now(timezone.utc)
-
-        except requests.exceptions.RequestException as e:
-            self.status = "Error"
-
-        self.poll_timer = Timer(self.POLL_INTERVAL_SEC, self.poll)
-        self.poll_timer.start()
+    def http_response_to_spots(self, http_response):
+        new_spots = []
+        # Iterate through source data
+        for source_spot in http_response.json():
+            # Convert to our spot format
+            spot = Spot(source=self.name(),
+                        source_id=source_spot["spotId"],
+                        dx_call=source_spot["activator"].upper(),
+                        de_call=source_spot["spotter"].upper(),
+                        freq=float(source_spot["frequency"]),
+                        mode=source_spot["mode"].upper(),
+                        comment=source_spot["comments"],
+                        sig="POTA",
+                        sig_refs=[source_spot["reference"]],
+                        sig_refs_names=[source_spot["name"]],
+                        time=datetime.strptime(source_spot["spotTime"], "%Y-%m-%dT%H:%M:%S").replace(tzinfo=pytz.UTC),
+                        grid=source_spot["grid6"],
+                        latitude=source_spot["latitude"],
+                        longitude=source_spot["longitude"])
+            # Fill in any missing data
+            spot.infer_missing()
+            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+            # that for us.
+            new_spots.append(spot)
+        return new_spots
providers/provider.py

@@ -1,7 +1,10 @@
 from datetime import datetime
 
+import pytz
+
 from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION
 
 
+# Generic data provider class. Subclasses of this query the individual APIs for data.
 class Provider:
 
 
providers/sota.py (new file, 68 lines)

@@ -0,0 +1,68 @@
+from datetime import datetime, timedelta
+
+import requests
+from requests_cache import CachedSession
+
+from data.spot import Spot
+from providers.http_provider import HTTPProvider
+
+
+# Provider for Summits on the Air
+class SOTA(HTTPProvider):
+    POLL_INTERVAL_SEC = 120
+    # SOTA wants us to check for an "epoch" from the API and see if it's actually changed before querying the main data
+    # APIs. So it's actually the EPOCH_URL that we pass into the constructor and get the superclass to call on a timer.
+    # The actual data lookup all happens after parsing and checking the epoch.
+    EPOCH_URL = "https://api-db2.sota.org.uk/api/spots/epoch"
+    SPOTS_URL = "https://api-db2.sota.org.uk/api/spots/60/all/all"
+    # SOTA spots don't contain lat/lon; we need a separate lookup for that
+    SUMMIT_URL_ROOT = "https://api-db2.sota.org.uk/api/summits/"
+    SUMMIT_DATA_CACHE_TIME_DAYS = 30
+    SUMMIT_DATA_CACHE = CachedSession("sota_summit_data_cache", expire_after=timedelta(days=SUMMIT_DATA_CACHE_TIME_DAYS))
+
+    def __init__(self):
+        super().__init__(self.EPOCH_URL, self.POLL_INTERVAL_SEC)
+        self.api_epoch = ""
+
+    def name(self):
+        return "SOTA"
+
+    def http_response_to_spots(self, http_response):
+        # OK, source data is actually just the epoch at this point. We'll then go on to fetch real data if we know this
+        # has changed.
+        epoch_changed = http_response.text != self.api_epoch
+        self.api_epoch = http_response.text
+
+        new_spots = []
+        # OK, if the epoch actually changed, now we make the real request for data.
+        if epoch_changed:
+            source_data = requests.get(self.SPOTS_URL, headers=self.HTTP_HEADERS).json()
+            # Iterate through source data
+            for source_spot in source_data:
+                # Convert to our spot format
+                spot = Spot(source=self.name(),
+                            source_id=source_spot["id"],
+                            dx_call=source_spot["activatorCallsign"].upper(),
+                            dx_name=source_spot["activatorName"],
+                            de_call=source_spot["callsign"].upper(),
+                            freq=(float(source_spot["frequency"]) * 1000) if (source_spot["frequency"] is not None) else None,  # Seen SOTA spots with no frequency!
+                            mode=source_spot["mode"].upper(),
+                            comment=source_spot["comments"],
+                            sig="SOTA",
+                            sig_refs=[source_spot["summitCode"]],
+                            sig_refs_names=[source_spot["summitName"]],
+                            time=datetime.fromisoformat(source_spot["timeStamp"]),
+                            activation_score=source_spot["points"])
+
+                # SOTA doesn't give summit lat/lon/grid in the main call, so we need another separate call for this
+                summit_data = self.SUMMIT_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=self.HTTP_HEADERS).json()
+                spot.grid = summit_data["locator"]
+                spot.latitude = summit_data["latitude"]
+                spot.longitude = summit_data["longitude"]
+
+                # Fill in any missing data
+                spot.infer_missing()
+                # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+                # that for us.
+                new_spots.append(spot)
+        return new_spots
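The epoch gating described in the comments above boils down to a cheap poll followed by a conditional full fetch. A condensed sketch of that flow outside the provider machinery, using the same two endpoints and plain requests for brevity:

import requests

EPOCH_URL = "https://api-db2.sota.org.uk/api/spots/epoch"
SPOTS_URL = "https://api-db2.sota.org.uk/api/spots/60/all/all"

last_epoch = ""
epoch = requests.get(EPOCH_URL).text
if epoch != last_epoch:
    # The epoch moved on, so something changed server-side: only now fetch the full spot list
    source_data = requests.get(SPOTS_URL).json()
    last_epoch = epoch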
providers/wwbota.py (new file, 51 lines)

@@ -0,0 +1,51 @@
+from datetime import datetime
+
+from data.spot import Spot
+from providers.http_provider import HTTPProvider
+
+
+# Provider for Worldwide Bunkers on the Air
+class WWBOTA(HTTPProvider):
+    POLL_INTERVAL_SEC = 120
+    SPOTS_URL = "https://api.wwbota.org/spots/"
+
+    def __init__(self):
+        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)
+
+    def name(self):
+        return "WWBOTA"
+
+    def http_response_to_spots(self, http_response):
+        new_spots = []
+        # Iterate through source data
+        for source_spot in http_response.json():
+            # Convert to our spot format. First we unpack references, because WWBOTA spots can have more than one for
+            # n-fer activations.
+            refs = []
+            ref_names = []
+            for ref in source_spot["references"]:
+                refs.append(ref["reference"])
+                ref_names.append(ref["name"])
+            spot = Spot(source=self.name(),
+                        dx_call=source_spot["call"].upper(),
+                        de_call=source_spot["spotter"].upper(),
+                        freq=float(source_spot["freq"]) * 1000,  # MHz to kHz
+                        mode=source_spot["mode"].upper(),
+                        comment=source_spot["comment"],
+                        sig="WWBOTA",
+                        sig_refs=refs,
+                        sig_refs_names=ref_names,
+                        time=datetime.fromisoformat(source_spot["time"]),
+                        # WWBOTA spots can contain multiple references for bunkers being activated simultaneously. For
+                        # now, we will just pick the first one to use as our grid, latitude and longitude.
+                        grid=source_spot["references"][0]["locator"],
+                        latitude=source_spot["references"][0]["lat"],
+                        longitude=source_spot["references"][0]["long"],
+                        qrt=source_spot["type"] == "QRT")
+            # Fill in any missing data
+            spot.infer_missing()
+            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+            # that for us. But WWBOTA does support a special "Test" spot type, which we need to avoid adding.
+            if source_spot["type"] != "Test":
+                new_spots.append(spot)
+        return new_spots
providers/wwff.py (new file, 43 lines)

@@ -0,0 +1,43 @@
+from datetime import datetime
+
+import pytz
+
+from data.spot import Spot
+from providers.http_provider import HTTPProvider
+
+
+# Provider for Worldwide Flora & Fauna
+class WWFF(HTTPProvider):
+    POLL_INTERVAL_SEC = 120
+    SPOTS_URL = "https://spots.wwff.co/static/spots.json"
+
+    def __init__(self):
+        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)
+
+    def name(self):
+        return "WWFF"
+
+    def http_response_to_spots(self, http_response):
+        new_spots = []
+        # Iterate through source data
+        for source_spot in http_response.json():
+            # Convert to our spot format
+            spot = Spot(source=self.name(),
+                        source_id=source_spot["id"],
+                        dx_call=source_spot["activator"].upper(),
+                        de_call=source_spot["spotter"].upper(),
+                        freq=float(source_spot["frequency_khz"]),
+                        mode=source_spot["mode"].upper(),
+                        comment=source_spot["remarks"],
+                        sig="WWFF",
+                        sig_refs=[source_spot["reference"]],
+                        sig_refs_names=[source_spot["reference_name"]],
+                        time=datetime.fromtimestamp(source_spot["spot_time"]).replace(tzinfo=pytz.UTC),
+                        latitude=source_spot["latitude"],
+                        longitude=source_spot["longitude"])
+            # Fill in any missing data
+            spot.infer_missing()
+            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
+            # that for us.
+            new_spots.append(spot)
+        return new_spots