Split up some code for sanity #3

This commit is contained in:
Ian Renton
2025-12-23 11:51:00 +00:00
parent fd246fc17b
commit 61784e8af6
12 changed files with 341 additions and 242 deletions

@@ -0,0 +1,7 @@
import tornado
# API request handler for /api/v1/spot (POST)
class APISpotHandler(tornado.web.RequestHandler):
def post(self):
# todo
self.write("Hello, world")

@@ -0,0 +1,57 @@
from datetime import datetime
import pytz
import tornado
# API request handler for /api/v1/alerts
class APIAlertsHandler(tornado.web.RequestHandler):
def get(self):
# todo
self.write("Hello, world")
# API request handler for /api/v1/alerts/stream
class APIAlertsStreamHandler(tornado.web.RequestHandler):
def get(self):
# todo
self.write("Hello, world")
# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list
# of query parameters and their function is defined in the API docs.
def alert_allowed_by_query(alert, query):
for k in query.keys():
match k:
case "received_since":
since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC)
if not alert.received_time or alert.received_time <= since:
return False
case "max_duration":
max_duration = int(query.get(k))
# Check the duration if end_time is provided. If end_time is not provided, assume the activation is
# "short", i.e. it always passes this check. If dxpeditions_skip_max_duration_check is true and
# the alert is a dxpedition, it also always passes the check.
skip_for_dxpedition = query.get("dxpeditions_skip_max_duration_check", "").upper() == "TRUE"
if alert.is_dxpedition and skip_for_dxpedition:
continue
if alert.end_time and alert.start_time and alert.end_time - alert.start_time > max_duration:
return False
case "source":
sources = query.get(k).split(",")
if not alert.source or alert.source not in sources:
return False
case "sig":
# If a list of sigs is provided, the alert must have a sig and it must match one of them.
# The special "sig" "NO_SIG", when supplied in the list, mathches alerts with no sig.
sigs = query.get(k).split(",")
include_no_sig = "NO_SIG" in sigs
if not alert.sig and not include_no_sig:
return False
if alert.sig and alert.sig not in sigs:
return False
case "dx_continent":
dxconts = query.get(k).split(",")
if not alert.dx_continent or alert.dx_continent not in dxconts:
return False
case "dx_call_includes":
dx_call_includes = query.get(k).strip()
if not alert.dx_call or dx_call_includes.upper() not in alert.dx_call.upper():
return False
return True
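
As a quick illustration of how alert_allowed_by_query behaves, here is a minimal sketch. The import path and the alert object are assumptions for the example; any object exposing the attributes the queried filters read would do.

from types import SimpleNamespace
# Hypothetical import path - the real module name is not shown in this commit.
from handlers.api_alerts import alert_allowed_by_query
# Minimal stand-in for an alert; only the attributes read by the queried filters are needed.
alert = SimpleNamespace(source="POTA", dx_continent="EU")
query = {"source": "POTA,SOTA", "dx_continent": "EU,NA"}
print(alert_allowed_by_query(alert, query))  # True - the alert passes both filters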

@@ -0,0 +1,13 @@
import tornado
# API request handler for /api/v1/lookup/call
class APILookupCallHandler(tornado.web.RequestHandler):
def get(self):
# todo
self.write("Hello, world")
# API request handler for /api/v1/lookup/sigref
class APILookupSIGRefHandler(tornado.web.RequestHandler):
def get(self):
# todo
self.write("Hello, world")

@@ -0,0 +1,37 @@
import json
import tornado
from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS
from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS
from core.utils import serialize_everything
# API request handler for /api/v1/options
class APIOptionsHandler(tornado.web.RequestHandler):
def initialize(self, status_data):
self.status_data = status_data
def get(self):
options = {"bands": BANDS,
"modes": ALL_MODES,
"mode_types": MODE_TYPES,
"sigs": SIGS,
# Spot/alert sources are filtered to only those enabled in config; there's no point letting the user toggle things that aren't available.
"spot_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))),
"alert_sources": list(
map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))),
"continents": CONTINENTS,
"max_spot_age": MAX_SPOT_AGE,
"spot_allowed": ALLOW_SPOTTING,
"web-ui-options": WEB_UI_OPTIONS}
# If spotting to this server is enabled, "API" is another valid spot source even though it does not come from
# one of our providers.
if ALLOW_SPOTTING:
options["spot_sources"].append("API")
self.write(json.dumps(options, default=serialize_everything))
self.set_status(200)
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")

@@ -0,0 +1,163 @@
import json
import logging
from datetime import datetime, timedelta
import pytz
import tornado
from core.utils import serialize_everything
# API request handler for /api/v1/spots
class APISpotsHandler(tornado.web.RequestHandler):
def initialize(self, spots):
self.spots = spots
def get(self):
try:
# request.arguments maps each param key to a list of byte strings, because technically the client can
# supply multiple values; reduce that to just the first entry and decode it to str
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
# Fetch all spots matching the query
data = get_spot_list_with_filters(self.spots, query_params)
self.write(json.dumps(data, default=serialize_everything))
self.set_status(200)
except ValueError as e:
logging.error(e)
self.write(json.dumps("Bad request - " + str(e), default=serialize_everything))
self.set_status(400)
except Exception as e:
logging.error(e)
self.write(json.dumps("Error - " + str(e), default=serialize_everything))
self.set_status(500)
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")
# API request handler for /api/v1/spots/stream
class APISpotsStreamHandler(tornado.web.RequestHandler):
def get(self):
# todo
self.write("Hello, world")
# Utility function to apply filters to the overall spot list and return only a subset. Enables query parameters in
# the main "spots" GET call.
def get_spot_list_with_filters(all_spots, query):
# Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots
# that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned.
# The list of query string filters is defined in the API docs.
spot_ids = list(all_spots.iterkeys())
spots = []
for k in spot_ids:
s = all_spots.get(k)
if s is not None:
spots.append(s)
spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0), reverse=True)
spots = list(filter(lambda spot: spot_allowed_by_query(spot, query), spots))
if "limit" in query.keys():
spots = spots[:int(query.get("limit"))]
# Ensure only the latest spot of each callsign-SSID combo is present in the list. This relies on the
# list being in reverse time order, so if any future change allows re-ordering the list, that should
# be done *after* this. SSIDs are deliberately included here (see issue #68) because e.g. M0TRT-7
# and M0TRT-9 APRS transponders could well be in different locations, on different frequencies etc.
# This is a special consideration for the geo map and band map views (and Field Spotter) because while
# duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't
# work well for the other views.
if "dedupe" in query.keys():
dedupe = query.get("dedupe").upper() == "TRUE"
if dedupe:
spots_temp = []
already_seen = []
for s in spots:
call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
if call_plus_ssid not in already_seen:
spots_temp.append(s)
already_seen.append(call_plus_ssid)
spots = spots_temp
return spots
# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
# of query parameters and their function is defined in the API docs.
def spot_allowed_by_query(spot, query):
for k in query.keys():
match k:
case "since":
since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp()
if not spot.time or spot.time <= since:
return False
case "max_age":
max_age = int(query.get(k))
since = (datetime.now(pytz.UTC) - timedelta(seconds=max_age)).timestamp()
if not spot.time or spot.time <= since:
return False
case "received_since":
since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp()
if not spot.received_time or spot.received_time <= since:
return False
case "source":
sources = query.get(k).split(",")
if not spot.source or spot.source not in sources:
return False
case "sig":
# If a list of sigs is provided, the spot must have a sig and it must match one of them.
# The special "sig" "NO_SIG", when supplied in the list, mathches spots with no sig.
sigs = query.get(k).split(",")
include_no_sig = "NO_SIG" in sigs
if not spot.sig and not include_no_sig:
return False
if spot.sig and spot.sig not in sigs:
return False
case "needs_sig":
# If true, a sig is required; it doesn't matter what it is, it just can't be missing. Mutually
# exclusive with supplying the special "NO_SIG" value to the "sig" query param.
needs_sig = query.get(k).upper() == "TRUE"
if needs_sig and not spot.sig:
return False
case "needs_sig_ref":
# If true, at least one sig ref is required; it doesn't matter what it is, it just can't be missing.
needs_sig_ref = query.get(k).upper() == "TRUE"
if needs_sig_ref and (not spot.sig_refs or len(spot.sig_refs) == 0):
return False
case "band":
bands = query.get(k).split(",")
if not spot.band or spot.band not in bands:
return False
case "mode":
modes = query.get(k).split(",")
if not spot.mode or spot.mode not in modes:
return False
case "mode_type":
mode_types = query.get(k).split(",")
if not spot.mode_type or spot.mode_type not in mode_types:
return False
case "dx_continent":
dxconts = query.get(k).split(",")
if not spot.dx_continent or spot.dx_continent not in dxconts:
return False
case "de_continent":
deconts = query.get(k).split(",")
if not spot.de_continent or spot.de_continent not in deconts:
return False
case "comment_includes":
comment_includes = query.get(k).strip()
if not spot.comment or comment_includes.upper() not in spot.comment.upper():
return False
case "dx_call_includes":
dx_call_includes = query.get(k).strip()
if not spot.dx_call or dx_call_includes.upper() not in spot.dx_call.upper():
return False
case "allow_qrt":
# If false, spots that are flagged as QRT are not returned.
prevent_qrt = query.get(k).upper() == "FALSE"
if prevent_qrt and spot.qrt:
return False
case "needs_good_location":
# If true, spots require a "good" location to be returned
needs_good_location = query.get(k).upper() == "TRUE"
if needs_good_location and not spot.dx_location_good:
return False
return True
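
A quick sketch of spot_allowed_by_query in isolation. The import path and the spot object are assumptions; spot.time is treated as a UTC epoch timestamp, matching how the function compares it.

import time
from types import SimpleNamespace
# Hypothetical import path - the real module name is not shown in this commit.
from handlers.api_spots import spot_allowed_by_query
# A spot seen ten minutes ago on 20m FT8; only the attributes read by the queried filters are needed.
spot = SimpleNamespace(time=time.time() - 600, band="20m", mode="FT8")
query = {"max_age": "3600", "band": "20m,40m", "mode": "FT8,FT4"}
print(spot_allowed_by_query(spot, query))  # True - recent enough, on an allowed band and mode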

@@ -0,0 +1,17 @@
import json
import tornado
from core.utils import serialize_everything
# API request handler for /api/v1/status
class APIStatusHandler(tornado.web.RequestHandler):
def initialize(self, status_data):
self.status_data = status_data
def get(self):
self.write(json.dumps(self.status_data, default=serialize_everything))
self.set_status(200)
self.set_header("Cache-Control", "no-store")
self.set_header("Content-Type", "application/json")

@@ -0,0 +1,12 @@
import tornado
from prometheus_client.openmetrics.exposition import CONTENT_TYPE_LATEST
from core.prometheus_metrics_handler import get_metrics
# Handler for Prometheus metrics endpoint
class PrometheusMetricsHandler(tornado.web.RequestHandler):
def get(self):
self.write(get_metrics())
self.set_status(200)
self.set_header('Content-Type', CONTENT_TYPE_LATEST)

@@ -0,0 +1,14 @@
import tornado
from core.config import ALLOW_SPOTTING
from core.constants import SOFTWARE_VERSION
# Handler for all HTML pages generated from templates
class PageTemplateHandler(tornado.web.RequestHandler):
def initialize(self, template_name):
self.template_name = template_name
def get(self):
# Load named template, and provide variables used in templates
self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING)
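
A wiring sketch showing how template_name might be supplied per route; the page names, import path and template directory are assumptions, not taken from this commit.

import tornado.web
# Hypothetical import path, routes and template directory, for illustration only.
from handlers.page_template import PageTemplateHandler
app = tornado.web.Application([(r"/", PageTemplateHandler, dict(template_name="index")),
                               (r"/about", PageTemplateHandler, dict(template_name="about"))],
                              template_path="templates")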