mirror of
https://git.ianrenton.com/ian/spothole.git
synced 2025-10-27 08:49:27 +00:00
Implement basic API server
@@ -30,4 +30,5 @@ Suggested names so far:
 * Collect-o-Matic
 * Spot-o-Tron
 * Basic Universal Radio Program (BURP)
 * The Spotinator
+* DX Cluster API
@@ -1,4 +1,3 @@
-# Provides a timed cleanup of the spot list.
 import logging
 from datetime import datetime, timedelta
 from threading import Timer
@@ -7,6 +6,7 @@ from time import sleep
 import pytz


+# Provides a timed cleanup of the spot list.
 class CleanupTimer:

     # Constructor
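The body of CleanupTimer is not shown in this diff. Going only by the imports above and the constructor call in main.py (spot_list, cleanup_interval, max_spot_age), a timer-driven cleanup might look roughly like the sketch below; the class name, attribute names and the Timer re-scheduling are assumptions for illustration, not the project's actual code.

from datetime import datetime, timedelta
from threading import Timer

import pytz


class CleanupTimerSketch:

    def __init__(self, spot_list, cleanup_interval, max_spot_age):
        self.spot_list = spot_list
        self.cleanup_interval = cleanup_interval
        self.max_spot_age = max_spot_age
        self.timer = None

    def start(self):
        # Drop spots older than max_spot_age seconds (assumes spot.time is a timezone-aware datetime),
        # then re-arm the timer so the cleanup repeats on its own thread.
        cutoff = datetime.now(pytz.UTC) - timedelta(seconds=self.max_spot_age)
        self.spot_list[:] = [s for s in self.spot_list if s.time > cutoff]
        self.timer = Timer(self.cleanup_interval, self.start)
        self.timer.daemon = True
        self.timer.start()

    def stop(self):
        if self.timer:
            self.timer.cancel()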
@@ -6,6 +6,7 @@ SOFTWARE_VERSION = "0.1"

 # Todo make configurable
 SERVER_OWNER_CALLSIGN = "M0TRT"
+WEB_SERVER_PORT = 8080
 MAX_SPOT_AGE_SEC = 3600

 # Modes
@@ -1,6 +1,10 @@
-from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES
+import logging
 from datetime import datetime

 from pyhamtools import LookupLib, Callinfo

+from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES
+
 # Static lookup helpers from pyhamtools
 # todo in future add QRZ as a second lookup option in case it provides more data?
 lookuplib = LookupLib(lookuptype="countryfile")
@@ -23,7 +27,7 @@ def infer_mode_family_from_mode(mode):
         return "DATA"
     else:
         if mode.upper() != "OTHER":
-            print("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
+            logging.warn("Found an unrecognised mode: " + mode + ". Developer should categorise this.")
         return None

 # Infer a band from a frequency in kHz
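As a usage illustration (assuming this helper lives in core.utils alongside serialize_everything, and that PHONE_MODES and DATA_MODES contain the usual entries, which this diff does not show):

from core.utils import infer_mode_family_from_mode

print(infer_mode_family_from_mode("SSB"))    # "PHONE", assuming "SSB" is in PHONE_MODES
print(infer_mode_family_from_mode("FT8"))    # "DATA", assuming "FT8" is in DATA_MODES
print(infer_mode_family_from_mode("XYZZY"))  # None, after logging a warning about the unrecognised mode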
@@ -66,4 +70,12 @@ def infer_itu_zone_from_callsign(call):
     try:
         return callinfo.get_ituz(call)
     except KeyError as e:
         return None
+
+# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things.
+# Converts datetimes to ISO.
+# Anything else it tries to convert to a dict.
+def serialize_everything(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()
+    return obj.__dict__
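In practice this helper is passed as the default for json.dumps, which is how the new web server below serialises the spot list. A minimal standalone example:

import json
from datetime import datetime, timezone

from core.utils import serialize_everything

payload = {"received_time": datetime.now(timezone.utc), "call": "M0TRT"}
print(json.dumps(payload, default=serialize_everything))
# datetimes become ISO 8601 strings; other unserialisable objects fall back to their __dict__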
@@ -1,3 +1,4 @@
+import json
 from dataclasses import dataclass
 from datetime import datetime
@@ -131,4 +132,8 @@ class Spot:

     # TODO use QRZ/HamQTH provider to get grids, lat Lon, when missing; and DX name
     # credentials in config file which is .gitignored; sample provided
     # TODO lat/lon from DXCC centre as last resort?
+
+    # JSON serialise
+    def to_json(self):
+        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)
main.py
@@ -1,8 +1,11 @@
 # Main script
+import logging
 import signal
 import sys
 from time import sleep

 from core.cleanup import CleanupTimer
-from core.constants import MAX_SPOT_AGE_SEC
+from core.constants import MAX_SPOT_AGE_SEC, WEB_SERVER_PORT
 from providers.dxcluster import DXCluster
 from providers.gma import GMA
 from providers.hema import HEMA
@@ -11,17 +14,32 @@ from providers.pota import POTA
 from providers.sota import SOTA
 from providers.wwbota import WWBOTA
 from providers.wwff import WWFF
+from server.webserver import WebServer

 # Main control flag, switch False to stop main application thread
 run = True

 # Shutdown function
 def shutdown(sig, frame):
-    print("Stopping program, this may take a few seconds...")
+    logging.info("Stopping program, this may take a few seconds...")
     global run
     run = False
     for p in providers: p.stop()
     cleanup_timer.stop()


 # Main function
 if __name__ == '__main__':
-    print("Starting...")
+    # Set up logging
+    root = logging.getLogger()
+    root.setLevel(logging.INFO)
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.INFO)
+    formatter = logging.Formatter("%(message)s")
+    handler.setFormatter(formatter)
+    root.addHandler(handler)
+    logging.info("Starting...")

     # Shut down gracefully on SIGINT
     signal.signal(signal.SIGINT, shutdown)
@@ -48,13 +66,14 @@ if __name__ == '__main__':
     cleanup_timer = CleanupTimer(spot_list=spot_list, cleanup_interval=60, max_spot_age=MAX_SPOT_AGE_SEC)
     cleanup_timer.start()

-    # Todo serve spot API
-    # Todo spot API arguments e.g. "since" based on received_time of spots, sources, sigs, dx cont, dxcc, de cont, band, mode, filter out qrt, filter pre-qsy
-    # Todo serve status API
-    # Todo serve apidocs
-    # Todo serve website
+    # Set up web server
+    web_server = WebServer(spot_list=spot_list, port=WEB_SERVER_PORT)
+    web_server.start()

-    print("Startup complete.")
+    logging.info("Startup complete.")

     while run:
         sleep(1)


+# TODO NOTES FOR NGINX REVERSE PROXY
@@ -25,14 +25,14 @@ class DXCluster(Provider):
         self.hostname = hostname
         self.port = port
         self.telnet = None
-        self.thread = None
+        self.thread = Thread(target=self.handle)
+        self.thread.daemon = True
         self.run = True

     def name(self):
         return "DX Cluster " + self.hostname

     def start(self):
-        self.thread = Thread(target=self.handle)
         self.thread.start()

     def stop(self):
@@ -46,10 +46,12 @@ class DXCluster(Provider):
         while not connected and self.run:
             try:
                 self.status = "Connecting"
+                logging.info("DX Cluster " + self.hostname + " connecting...")
                 self.telnet = telnetlib3.Telnet(self.hostname, self.port)
-                self.telnet.read_until("login: ".encode("ascii"))
-                self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("ascii"))
+                self.telnet.read_until("login: ".encode("utf-8"))
+                self.telnet.write((SERVER_OWNER_CALLSIGN + "\n").encode("utf-8"))
                 connected = True
+                logging.info("DX Cluster " + self.hostname + " connected.")
             except Exception as e:
                 self.status = "Error"
                 logging.exception("Exception while connecting to DX Cluster Provider (" + self.hostname + ").")
@@ -59,8 +61,8 @@ class DXCluster(Provider):
         while connected and self.run:
             try:
                 # Check new telnet info against regular expression
-                telnet_output = self.telnet.read_until("\n".encode("ascii"))
-                match = self.LINE_PATTERN.match(telnet_output.decode("ascii"))
+                telnet_output = self.telnet.read_until("\n".encode("utf-8"))
+                match = self.LINE_PATTERN.match(telnet_output.decode("utf-8"))
                 if match:
                     spot_time = datetime.strptime(match.group(5), "%H%MZ")
                     spot_datetime = datetime.combine(datetime.today(), spot_time.time()).replace(tzinfo=pytz.UTC)
@@ -73,10 +75,11 @@ class DXCluster(Provider):
                     # Fill in any blanks
                     spot.infer_missing()
                     # Add to our list
-                    self.submit([spot])
+                    self.submit(spot)

                 self.status = "OK"
                 self.last_update_time = datetime.now(timezone.utc)
+                logging.debug("Data received from DX Cluster " + self.hostname + ".")

             except Exception as e:
                 connected = False
@@ -85,6 +88,7 @@ class DXCluster(Provider):
                 logging.exception("Exception in DX Cluster Provider (" + self.hostname + ")")
                 sleep(5)
             else:
+                logging.info("DX Cluster " + self.hostname + " shutting down...")
                 self.status = "Shutting down"

         self.status = "Disconnected"
@@ -26,7 +26,9 @@ class HTTPProvider(Provider):
         # Fire off a one-shot thread to run poll() for the first time, just to ensure start() returns immediately and
         # the application can continue starting. The thread itself will then die, and the timer will kick in on its own
         # thread.
+        logging.info("Set up query of " + self.name() + " API every " + str(self.poll_interval) + " seconds.")
         thread = Thread(target=self.poll)
+        thread.daemon = True
        thread.start()

     def stop(self):
@@ -35,15 +37,17 @@ class HTTPProvider(Provider):
     def poll(self):
         try:
             # Request data from API
+            logging.debug("Polling " + self.name() + " API...")
             http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
             # Pass off to the subclass for processing
             new_spots = self.http_response_to_spots(http_response)
             # Submit the new spots for processing. There might not be any spots for the less popular programs.
             if new_spots:
-                self.submit(new_spots)
+                self.submit_batch(new_spots)

             self.status = "OK"
             self.last_update_time = datetime.now(pytz.UTC)
+            logging.debug("Received data from " + self.name() + " API.")

         except Exception as e:
             self.status = "Error"
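The comment in the first HTTPProvider hunk describes the polling lifecycle: start() fires a throwaway thread so start-up is not blocked, the first poll() runs on it, and a timer then keeps polling on its own thread. The timer re-scheduling itself is not visible in these hunks, so the following is only a sketch of the idea, with the class and method names invented for illustration:

from threading import Thread, Timer


class PollingProviderSketch:

    def __init__(self, poll_interval):
        self.poll_interval = poll_interval

    def start(self):
        # One-shot thread: start() returns immediately while the first poll runs in the background.
        thread = Thread(target=self.poll)
        thread.daemon = True
        thread.start()

    def poll(self):
        self.fetch_and_process()  # placeholder for the real HTTP request and spot handling
        # Re-arm a timer so the next poll happens on its own thread after poll_interval seconds.
        timer = Timer(self.poll_interval, self.poll)
        timer.daemon = True
        timer.start()

    def fetch_and_process(self):
        pass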
@@ -1,3 +1,4 @@
+import logging
 from datetime import datetime

 import pytz
@@ -35,7 +36,7 @@ class ParksNPeaks(HTTPProvider):

         # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore.
         if spot.sig not in ["POTA", "SOTA", "WWFF"]:
-            print("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
+            logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
         # Fill in any missing data
         spot.infer_missing()
         # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
@@ -30,15 +30,21 @@ class Provider:
     def start(self):
         raise NotImplementedError("Subclasses must implement this method")

-    # Submit one or more new spots retrieved from the provider. Only spots that are newer than the last spot retrieved
-    # by this provider will be added to the spot list, to prevent duplications. This is called by the subclasses on
-    # receiving spots.
-    def submit(self, spots):
+    # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
+    # by this provider will be added to the spot list, to prevent duplications. This is called by the API-querying
+    # subclasses on receiving spots.
+    def submit_batch(self, spots):
         for spot in spots:
             if spot.time > self.last_spot_time:
                 self.spot_list.append(spot)
         self.last_spot_time = max(map(lambda s: s.time, spots))

+    # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. This is
+    # called by the data streaming subclasses, which can be relied upon not to re-provide old spots.
+    def submit(self, spot):
+        self.spot_list.append(spot)
+        self.last_spot_time = spot.time

     # Stop any threads and prepare for application shutdown
     def stop(self):
         raise NotImplementedError("Subclasses must implement this method")
@@ -1,3 +1,4 @@
+bottle~=0.13.4
 requests-cache~=1.2.1
 pyhamtools~=0.12.0
 telnetlib3~=2.0.8
server/webserver.py (new file)
@@ -0,0 +1,48 @@
import json
import logging
from threading import Thread

import bottle
from bottle import run, response

from core.utils import serialize_everything


# Provides the public-facing web server.
class WebServer:

    # Constructor
    def __init__(self, spot_list, port):
        self.spot_list = spot_list
        self.port = port
        self.thread = Thread(target=self.run)
        self.thread.daemon = True

        # Set up routing
        bottle.get("/api/spots")(self.serve_api_spots)

    # Start the web server
    def start(self):
        self.thread.start()

    # Run the web server itself. This blocks until the server is shut down, so it runs in a separate thread.
    def run(self):
        logging.info("Starting web server on port " + str(self.port) + "...")
        run(host='localhost', port=self.port)

    # Main spots API
    def serve_api_spots(self):
        spots_json = json.dumps(self.spot_list, default=serialize_everything)
        response.content_type = 'application/json'
        return spots_json


# Todo spot API arguments e.g. "since" based on received_time of spots, sources, sigs, dx cont, dxcc, de cont, band, mode, filter out qrt, filter pre-qsy
# Todo serve status API
# Todo serve apidocs
# Todo serve website

# Examples
# @route('/download/<filename>')
# def download(filename):
#     return static_file(filename, root='/path/to/static/files', download=f"download-{filename}")
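For the "since" argument mentioned in the TODO above, one possible shape using bottle's request.query is sketched below; the parameter format, the function name and the received_time comparison are assumptions at this stage, not an implemented API:

import json
from datetime import datetime

from bottle import request, response

from core.utils import serialize_everything


# Hypothetical filtered variant of serve_api_spots, e.g. GET /api/spots?since=2025-01-01T00:00:00+00:00
def serve_api_spots_since(spot_list):
    since_param = request.query.get("since")
    spots = spot_list
    if since_param:
        since = datetime.fromisoformat(since_param)
        spots = [s for s in spots if s.received_time > since]
    response.content_type = 'application/json'
    return json.dumps(spots, default=serialize_everything)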