Mirror of https://git.ianrenton.com/ian/spothole.git (synced 2025-10-27 16:59:25 +00:00)

Commit: Implement basic API server
@@ -31,3 +31,4 @@ Suggested names so far:
 * Spot-o-Tron
 * Basic Universal Radio Program (BURP)
 * The Spotinator
+* DX Cluster API
@@ -1,4 +1,3 @@
-# Provides a timed cleanup of the spot list.
 import logging
 from datetime import datetime, timedelta
 from threading import Timer
@@ -7,6 +6,7 @@ from time import sleep
 import pytz


+# Provides a timed cleanup of the spot list.
 class CleanupTimer:

     # Constructor
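The hunks above only move the class comment; the cleanup logic itself is unchanged and not shown in this diff. As a rough illustration of what a timer-driven cleanup built from these imports can look like (names and details here are assumptions, not the project's actual code), a re-arming threading.Timer can prune anything older than max_spot_age seconds:

    from datetime import datetime, timedelta
    from threading import Timer

    import pytz


    class CleanupTimerSketch:
        # Illustrative stand-in for CleanupTimer; the real implementation is not part of this diff.
        def __init__(self, spot_list, cleanup_interval, max_spot_age):
            self.spot_list = spot_list
            self.cleanup_interval = cleanup_interval
            self.max_spot_age = max_spot_age
            self.timer = None

        def start(self):
            # Schedule the next cleanup; each run re-arms the timer.
            self.timer = Timer(self.cleanup_interval, self.cleanup)
            self.timer.daemon = True
            self.timer.start()

        def cleanup(self):
            cutoff = datetime.now(pytz.UTC) - timedelta(seconds=self.max_spot_age)
            # Prune in place so other holders of the list reference see the change.
            self.spot_list[:] = [s for s in self.spot_list if s.time > cutoff]
            self.start()

        def stop(self):
            if self.timer:
                self.timer.cancel()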
@@ -6,6 +6,7 @@ SOFTWARE_VERSION = "0.1"

 # Todo make configurable
 SERVER_OWNER_CALLSIGN = "M0TRT"
+WEB_SERVER_PORT = 8080
 MAX_SPOT_AGE_SEC = 3600

 # Modes
@@ -1,6 +1,10 @@
-from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES
+import logging
+from datetime import datetime

 from pyhamtools import LookupLib, Callinfo
+
+from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES
+
 # Static lookup helpers from pyhamtools
 # todo in future add QRZ as a second lookup option in case it provides more data?
 lookuplib = LookupLib(lookuptype="countryfile")
@@ -23,7 +27,7 @@ def infer_mode_family_from_mode(mode):
         return "DATA"
     else:
         if mode.upper() != "OTHER":
-            print("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
+            logging.warn("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
         return None

 # Infer a band from a frequency in kHz
@@ -67,3 +71,11 @@ def infer_itu_zone_from_callsign(call):
         return callinfo.get_ituz(call)
     except KeyError as e:
         return None
+
+# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things.
+# Converts datetimes to ISO.
+# Anything else it tries to convert to a dict.
+def serialize_everything(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()
+    return obj.__dict__
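A quick usage sketch of the new serializer default (the same pattern is used by the web server added later in this commit): json.dumps() calls serialize_everything() for anything it cannot encode natively, so datetimes become ISO strings and other objects fall back to their __dict__. The Thing class below is a hypothetical stand-in for a spot, not part of the project:

    import json
    from datetime import datetime

    import pytz

    from core.utils import serialize_everything


    class Thing:
        def __init__(self):
            self.dx_call = "K1XYZ"
            self.time = datetime(2025, 10, 27, 12, 34, tzinfo=pytz.UTC)

    print(json.dumps(Thing(), default=serialize_everything))
    # -> {"dx_call": "K1XYZ", "time": "2025-10-27T12:34:00+00:00"}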
@@ -1,3 +1,4 @@
+import json
 from dataclasses import dataclass
 from datetime import datetime

@@ -132,3 +133,7 @@ class Spot:
     # TODO use QRZ/HamQTH provider to get grids, lat Lon, when missing; and DX name
     # credentials in config file which is .gitignored; sample provided
     # TODO lat/lon from DXCC centre as last resort?
+
+    # JSON serialise
+    def to_json(self):
+        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)
main.py (37 changed lines)
@@ -1,8 +1,11 @@
 # Main script
+import logging
 import signal
+import sys
+from time import sleep

 from core.cleanup import CleanupTimer
-from core.constants import MAX_SPOT_AGE_SEC
+from core.constants import MAX_SPOT_AGE_SEC, WEB_SERVER_PORT
 from providers.dxcluster import DXCluster
 from providers.gma import GMA
 from providers.hema import HEMA
@@ -11,17 +14,32 @@ from providers.pota import POTA
 from providers.sota import SOTA
 from providers.wwbota import WWBOTA
 from providers.wwff import WWFF
+from server.webserver import WebServer
+
+# Main control flag, switch False to stop main application thread
+run = True

 # Shutdown function
 def shutdown(sig, frame):
-    print("Stopping program, this may take a few seconds...")
+    logging.info("Stopping program, this may take a few seconds...")
+    global run
+    run = False
     for p in providers: p.stop()
     cleanup_timer.stop()


 # Main function
 if __name__ == '__main__':
-    print("Starting...")
+    # Set up logging
+    root = logging.getLogger()
+    root.setLevel(logging.INFO)
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.INFO)
+    formatter = logging.Formatter("%(message)s")
+    handler.setFormatter(formatter)
+    root.addHandler(handler)
+    logging.info("Starting...")

     # Shut down gracefully on SIGINT
     signal.signal(signal.SIGINT, shutdown)

@@ -48,13 +66,14 @@ if __name__ == '__main__':
     cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=MAX_SPOT_AGE_SEC)
     cleanup_timer.start()

-    # Todo serve spot API
-    # Todo spot API arguments e.g. "since" based on received_time of spots, sources, sigs, dx cont, dxcc, de cont, band, mode, filter out qrt, filter pre-qsy
-    # Todo serve status API
-    # Todo serve apidocs
-    # Todo serve website
+    # Set up web server
+    web_server = WebServer(spot_list=spot_list, port=WEB_SERVER_PORT)
+    web_server.start()

-    print("Startup complete.")
+    logging.info("Startup complete.")
+
+    while run:
+        sleep(1)


 # TODO NOTES FOR NGINX REVERSE PROXY
@@ -25,14 +25,14 @@ class DXCluster(Provider):
         self.hostname = hostname
         self.port = port
         self.telnet = None
-        self.thread = None
+        self.thread = Thread(target=self.handle)
+        self.thread.daemon = True
         self.run = True

     def name(self):
         return "DX Cluster " + self.hostname

     def start(self):
-        self.thread = Thread(target=self.handle)
         self.thread.start()

     def stop(self):
@@ -46,10 +46,12 @@ class DXCluster(Provider):
         while not connected and self.run:
             try:
                 self.status = "Connecting"
+                logging.info("DX Cluster " + self.hostname + " connecting...")
                 self.telnet = telnetlib3.Telnet(self.hostname, self.port)
-                self.telnet.read_until("login: ".encode("ascii"))
-                self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("ascii"))
+                self.telnet.read_until("login: ".encode("utf-8"))
+                self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("utf-8"))
                 connected = True
+                logging.info("DX Cluster " + self.hostname + " connected.")
             except Exception as e:
                 self.status = "Error"
                 logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").")
@@ -59,8 +61,8 @@ class DXCluster(Provider):
         while connected and self.run:
             try:
                 # Check new telnet info against regular expression
-                telnet_output = self.telnet.read_until("\n".encode("ascii"))
-                match = self.LINE_PATTERN.match(telnet_output.decode("ascii"))
+                telnet_output = self.telnet.read_until("\n".encode("utf-8"))
+                match = self.LINE_PATTERN.match(telnet_output.decode("utf-8"))
                 if match:
                     spot_time = datetime.strptime(match.group(5), "%H%MZ")
                     spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
@@ -73,10 +75,11 @@ class DXCluster(Provider):
                     # Fill in any blanks
                     spot.infer_missing()
                     # Add to our list
-                    self.submit([spot])
+                    self.submit(spot)

                 self.status = "OK"
                 self.last_update_time = datetime.now(timezone.utc)
+                logging.debug("Data received from DX Cluster " + self.hostname + ".")

             except Exception as e:
                 connected = False
@@ -85,6 +88,7 @@ class DXCluster(Provider):
                 logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")")
                 sleep(5)
         else:
+            logging.info("DX Cluster " + self.hostname + " shutting down...")
             self.status = "Shutting down"

         self.status = "Disconnected"
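For context on the regular-expression handling above: DX cluster nodes emit one spot per line in a fairly standard text layout, and the project's LINE_PATTERN (defined elsewhere in this file, not shown in the diff) extracts the fields from it, with group 5 carrying the HHMM "Z" timestamp parsed above. The line format and regex below are a hypothetical illustration only, not the project's actual pattern:

    import re
    from datetime import datetime

    # Example of a typical DX cluster spot line (illustrative only).
    line = "DX de G4ABC:     14074.0  K1XYZ        FT8 up 5                1234Z"

    # Hypothetical pattern: spotter, frequency (kHz), DX call, comment, time.
    pattern = re.compile(r"DX de (\S+):\s+([\d.]+)\s+(\S+)\s+(.*?)\s+(\d{4}Z)")
    match = pattern.match(line)
    if match:
        spot_time = datetime.strptime(match.group(5), "%H%MZ")
        print(match.group(1), match.group(2), match.group(3), spot_time.time())
        # -> G4ABC 14074.0 K1XYZ 12:34:00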
@@ -26,7 +26,9 @@ class HTTPProvider(Provider):
         # Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and
         # the application can continue starting. The thread itself will then die, and the timer will kick in on its own
         # thread.
+        logging.info("Set up query of " + self.name() + " API every " + str(self.poll_interval) + " seconds.")
         thread = Thread(target=self.poll)
+        thread.daemon = True
         thread.start()

     def stop(self):
@@ -35,15 +37,17 @@ class HTTPProvider(Provider):
     def poll(self):
         try:
             # Request data from API
+            logging.debug("Polling " + self.name() + " API...")
             http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
             # Pass off to the subclass for processing
             new_spots = self.http_response_to_spots(http_response)
             # Submit the new spots for processing. There might not be any spots for the less popular programs.
             if new_spots:
-                self.submit(new_spots)
+                self.submit_batch(new_spots)

             self.status = "OK"
             self.last_update_time = datetime.now(pytz.UTC)
+            logging.debug("Received data from " + self.name() + " API.")

         except Exception as e:
             self.status = "Error"
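The hunks above only show the one-shot startup thread and the poll body; the recurring schedule ("the timer will kick in on its own thread") is handled elsewhere in the class and is not part of this diff. A minimal sketch of that kind of self-rescheduling poll, with placeholder names and URL that are assumptions rather than the project's code:

    import threading

    import requests

    def make_repeating_poller(url, poll_interval, handle_response):
        # Returns a function that performs one HTTP GET, hands the response off,
        # then re-arms a daemon timer for the next poll.
        def poll():
            try:
                handle_response(requests.get(url, timeout=30))
            finally:
                timer = threading.Timer(poll_interval, poll)
                timer.daemon = True
                timer.start()
        return poll

    # Usage: make_repeating_poller("https://example.com/api/spots", 60, print)()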
@@ -1,3 +1,4 @@
+import logging
 from datetime import datetime

 import pytz
@@ -35,7 +36,7 @@ class ParksNPeaks(HTTPProvider):

         # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore.
         if spot.sig not in ["POTA", "SOTA", "WWFF"]:
-            print("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
+            logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
             # Fill in any missing data
             spot.infer_missing()
             # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
@@ -30,15 +30,21 @@ class Provider:
     def start(self):
         raise NotImplementedError("Subclasses must implement this method")

-    # Submit one or more new spots retrieved from the provider. Only spots that are newer than the last spot retrieved
-    # by this provider will be added to the spot list, to prevent duplications. This is called by the subclasses on
-    # receiving spots.
-    def submit(self, spots):
+    # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
+    # by this provider will be added to the spot list, to prevent duplications. This is called by the API-querying
+    # subclasses on receiving spots.
+    def submit_batch(self, spots):
         for spot in spots:
             if spot.time > self.last_spot_time:
                 self.spot_list.append(spot)
         self.last_spot_time = max(map(lambda s: s.time, spots))

+    # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. This is
+    # called by the data streaming subclasses, which can be relied upon not to re-provide old spots.
+    def submit(self, spot):
+        self.spot_list.append(spot)
+        self.last_spot_time = spot.time
+
     # Stop any threads and prepare for application shutdown
     def stop(self):
         raise NotImplementedError("Subclasses must implement this method")
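To make the distinction in the comments concrete: submit_batch() filters a polled batch against last_spot_time so overlapping API responses do not create duplicates, while submit() appends unconditionally because streamed spots arrive only once. A toy walk-through using a hypothetical stand-in spot type, not the project's Spot class:

    from dataclasses import dataclass
    from datetime import datetime, timezone


    @dataclass
    class FakeSpot:
        dx_call: str
        time: datetime

    spot_list = []
    last_spot_time = datetime(2025, 10, 27, 12, 0, tzinfo=timezone.utc)

    # A polled batch that overlaps with what we already have: only the newer spot survives.
    batch = [
        FakeSpot("K1XYZ", datetime(2025, 10, 27, 12, 0, tzinfo=timezone.utc)),
        FakeSpot("G4ABC", datetime(2025, 10, 27, 12, 5, tzinfo=timezone.utc)),
    ]
    for spot in batch:
        if spot.time > last_spot_time:
            spot_list.append(spot)
    last_spot_time = max(s.time for s in batch)

    print([s.dx_call for s in spot_list])  # ['G4ABC']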
@@ -1,3 +1,4 @@
+bottle~=0.13.4
 requests-cache~=1.2.1
 pyhamtools~=0.12.0
 telnetlib3~=2.0.8
server/webserver.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+import json
+import logging
+from threading import Thread
+
+import bottle
+from bottle import run, response
+
+from core.utils import serialize_everything
+
+
+# Provides the public-facing web server.
+class WebServer:
+
+    # Constructor
+    def __init__(self, spot_list, port):
+        self.spot_list = spot_list
+        self.port = port
+        self.thread = Thread(target=self.run)
+        self.thread.daemon = True
+
+        # Set up routing
+        bottle.get("/api/spots")(self.serve_api_spots)
+
+    # Start the web server
+    def start(self):
+        self.thread.start()
+
+    # Run the web server itself. This blocks until the server is shut down, so it runs in a separate thread.
+    def run(self):
+        logging.info("Starting web server on port " + str(self.port) + "...")
+        run(host='localhost', port=self.port)
+
+    # Main spots API
+    def serve_api_spots(self):
+        spots_json = json.dumps(self.spot_list, default=serialize_everything)
+        response.content_type = 'application/json'
+        return spots_json
+
+
+# Todo spot API arguments e.g. "since" based on received_time of spots, sources, sigs, dx cont, dxcc, de cont, band, mode, filter out qrt, filter pre-qsy
+# Todo serve status API
+# Todo serve apidocs
+# Todo serve website
+
+# Examples
+# @route('/download/<filename>')
+# def download(filename):
+#     return static_file(filename, root='/path/to/static/files', download=f"download-{filename}")
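With the application running, the new endpoint can be exercised from any HTTP client; the bottle server binds to localhost on WEB_SERVER_PORT (8080 by default). A sketch assuming a locally running instance:

    import requests

    response = requests.get("http://localhost:8080/api/spots")
    response.raise_for_status()
    for spot in response.json():
        print(spot)  # each entry is a JSON object built from a Spot's __dict__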