Implement basic API server

Ian Renton
2025-09-27 14:27:39 +01:00
parent 4077f835b7
commit 6d735cfc67
12 changed files with 130 additions and 28 deletions

View File

@@ -25,14 +25,14 @@ class DXCluster(Provider):
self.hostname = hostname
self.port = port
self.telnet = None
- self.thread = None
+ self.thread = Thread(target=self.handle)
+ self.thread.daemon = True
self.run = True
def name(self):
return "DX Cluster " + self.hostname
def start(self):
- self.thread = Thread(target=self.handle)
self.thread.start()
def stop(self):
@@ -46,10 +46,12 @@ class DXCluster(Provider):
while not connected and self.run:
try:
self.status = "Connecting"
+ logging.info("DX Cluster " + self.hostname + " connecting...")
self.telnet = telnetlib3.Telnet(self.hostname, self.port)
- self.telnet.read_until("login: ".encode("ascii"))
- self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("ascii"))
+ self.telnet.read_until("login: ".encode("utf-8"))
+ self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("utf-8"))
connected = True
+ logging.info("DX Cluster " + self.hostname + " connected.")
except Exception as e:
self.status = "Error"
logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").")
@@ -59,8 +61,8 @@ class DXCluster(Provider):
while connected and self.run:
try:
# Check new telnet info against regular expression
- telnet_output = self.telnet.read_until("\n".encode("ascii"))
- match = self.LINE_PATTERN.match(telnet_output.decode("ascii"))
+ telnet_output = self.telnet.read_until("\n".encode("utf-8"))
+ match = self.LINE_PATTERN.match(telnet_output.decode("utf-8"))
if match:
spot_time = datetime.strptime(match.group(5), "%H%MZ")
spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
@@ -73,10 +75,11 @@ class DXCluster(Provider):
# Fill in any blanks
spot.infer_missing()
# Add to our list
- self.submit([spot])
+ self.submit(spot)
self.status = "OK"
self.last_update_time = datetime.now(timezone.utc)
+ logging.debug("Data received from DX Cluster " + self.hostname + ".")
except Exception as e:
connected = False
@@ -85,6 +88,7 @@ class DXCluster(Provider):
logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")")
sleep(5)
else:
+ logging.info("DX Cluster " + self.hostname + " shutting down...")
self.status = "Shutting down"
self.status = "Disconnected"

View File

@@ -26,7 +26,9 @@ class HTTPProvider(Provider):
# Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and
# the application can continue starting. The thread itself will then die, and the timer will kick in on its own
# thread.
+ logging.info("Set up query of " + self.name() + " API every " + str(self.poll_interval) + " seconds.")
thread = Thread(target=self.poll)
+ thread.daemon = True
thread.start()
def stop(self):
@@ -35,15 +37,17 @@ class HTTPProvider(Provider):
def poll(self):
try:
# Request data from API
+ logging.debug("Polling " + self.name() + " API...")
http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
# Pass off to the subclass for processing
new_spots = self.http_response_to_spots(http_response)
# Submit the new spots for processing. There might not be any spots for the less popular programs.
if new_spots:
- self.submit(new_spots)
+ self.submit_batch(new_spots)
self.status = "OK"
self.last_update_time = datetime.now(pytz.UTC)
+ logging.debug("Received data from " + self.name() + " API.")
except Exception as e:
self.status = "Error"

View File

@@ -1,3 +1,4 @@
+ import logging
from datetime import datetime
import pytz
@@ -35,7 +36,7 @@ class ParksNPeaks(HTTPProvider):
# If this is POTA, SOTA or WWFF data we already have it through other means, so ignore.
if spot.sig not in ["POTA", "SOTA", "WWFF"]:
- print("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
+ logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
# Fill in any missing data
spot.infer_missing()
# Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
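
A side note on the replacement line: logging.warn() still works but has been a deprecated alias of logging.warning() since Python 3.3. A tiny standalone example of the documented spelling, with lazy %s formatting and a made-up sig value:

import logging

logging.basicConfig(level=logging.WARNING)

sig = "SiOTA"  # example value only, not taken from real PNP data
# The %s argument is only formatted if the record is actually emitted.
logging.warning("PNP spot found with sig %s, no grid/lat/lon lookup implemented yet", sig)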

View File

@@ -30,15 +30,21 @@ class Provider:
def start(self):
raise NotImplementedError("Subclasses must implement this method")
- # Submit one or more new spots retrieved from the provider. Only spots that are newer than the last spot retrieved
- # by this provider will be added to the spot list, to prevent duplications. This is called by the subclasses on
- # receiving spots.
- def submit(self, spots):
+ # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
+ # by this provider will be added to the spot list, to prevent duplications. This is called by the API-querying
+ # subclasses on receiving spots.
+ def submit_batch(self, spots):
for spot in spots:
if spot.time > self.last_spot_time:
self.spot_list.append(spot)
self.last_spot_time = max(map(lambda s: s.time, spots))
+ # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. This is
+ # called by the data streaming subclasses, which can be relied upon not to re-provide old spots.
+ def submit(self, spot):
+ self.spot_list.append(spot)
+ self.last_spot_time = spot.time
# Stop any threads and prepare for application shutdown
def stop(self):
raise NotImplementedError("Subclasses must implement this method")
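
A short usage sketch of the two submission paths defined above. ProviderSketch reproduces just the two methods from this hunk so the example runs standalone; FakeSpot and the timestamps are invented for illustration:

from datetime import datetime, timedelta, timezone

class ProviderSketch:
    # Local stand-in carrying only the two submission methods from the diff.
    def __init__(self):
        self.spot_list = []
        self.last_spot_time = datetime.min.replace(tzinfo=timezone.utc)

    def submit_batch(self, spots):
        for spot in spots:
            if spot.time > self.last_spot_time:
                self.spot_list.append(spot)
        self.last_spot_time = max(map(lambda s: s.time, spots))

    def submit(self, spot):
        self.spot_list.append(spot)
        self.last_spot_time = spot.time

class FakeSpot:
    # Stand-in carrying only the attribute the de-duplication check uses.
    def __init__(self, time):
        self.time = time

now = datetime.now(timezone.utc)
provider = ProviderSketch()
provider.last_spot_time = now - timedelta(minutes=5)

# Batch path (API pollers): the ten-minute-old spot is dropped, the fresh one
# kept, so re-reading the same API response does not create duplicates.
provider.submit_batch([FakeSpot(now - timedelta(minutes=10)), FakeSpot(now)])
assert len(provider.spot_list) == 1

# Single-spot path (streaming providers): appended unconditionally, because a
# live stream is trusted not to replay old spots.
provider.submit(FakeSpot(now + timedelta(seconds=30)))
assert len(provider.spot_list) == 2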