diff --git a/core/prometheus_metrics_handler.py b/core/prometheus_metrics_handler.py index e452bd1..7d478ac 100644 --- a/core/prometheus_metrics_handler.py +++ b/core/prometheus_metrics_handler.py @@ -33,8 +33,6 @@ memory_use_gauge = Gauge( ) -# Get a Prometheus metrics response for Bottle +# Get a Prometheus metrics response for the web server def get_metrics(): - response.content_type = CONTENT_TYPE_LATEST - response.status = 200 return generate_latest(registry) diff --git a/requirements.txt b/requirements.txt index 7b00a05..a57523f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ pyyaml~=6.0.3 -bottle~=0.13.4 requests-cache~=1.2.1 pyhamtools~=0.12.0 telnetlib3~=2.0.8 @@ -14,4 +13,4 @@ pyproj~=3.7.2 prometheus_client~=0.23.1 beautifulsoup4~=4.14.2 websocket-client~=1.9.0 -gevent~=25.9.1 \ No newline at end of file +tornado~=6.5.4 \ No newline at end of file diff --git a/server/webserver.py b/server/webserver.py index f6b0906..7300e80 100644 --- a/server/webserver.py +++ b/server/webserver.py @@ -1,27 +1,34 @@ +import asyncio import json -import logging -import re +import os from datetime import datetime, timedelta -from queue import Queue -from threading import Thread -import bottle -import gevent import pytz -from bottle import run, request, response, template +import tornado +from prometheus_client.openmetrics.exposition import CONTENT_TYPE_LATEST +from tornado.web import StaticFileHandler -from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS -from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS, SOFTWARE_VERSION, UNKNOWN_BAND -from core.lookup_helper import lookup_helper -from core.prometheus_metrics_handler import page_requests_counter, get_metrics, api_requests_counter -from core.sig_utils import get_ref_regex_for_sig, populate_sig_ref_info -from data.sig_ref import SIGRef -from data.spot import Spot +from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE, WEB_UI_OPTIONS +from core.constants import SOFTWARE_VERSION, BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS +from core.prometheus_metrics_handler import get_metrics, page_requests_counter # Provides the public-facing web server. -class WebServer: +# TODO synchronous API responses +# TODO SSE API responses +# TODO clean_up_sse_queues +# TODO page & API access counters - how to do from a subclass handler? e.g. 
+# self.last_api_access_time = datetime.now(pytz.UTC) +# self.api_access_counter += 1 +# api_requests_counter.inc() +# self.status = "OK" +# +# self.last_page_access_time = datetime.now(pytz.UTC) +# self.page_access_counter += 1 +# page_requests_counter.inc() +# self.status = "OK" +class WebServer: # Constructor def __init__(self, spots, alerts, status_data, port): self.last_page_access_time = None @@ -34,422 +41,152 @@ class WebServer: self.sse_alert_queues = [] self.status_data = status_data self.port = port - self.thread = Thread(target=self.run) - self.thread.daemon = True self.status = "Starting" - - # Base template data - bottle.BaseTemplate.defaults['software_version'] = SOFTWARE_VERSION - bottle.BaseTemplate.defaults['allow_spotting'] = ALLOW_SPOTTING - - # Routes for API calls - bottle.get("/api/v1/spots")(lambda: self.serve_spots_api()) - bottle.get("/api/v1/alerts")(lambda: self.serve_alerts_api()) - bottle.get("/api/v1/spots/stream")(lambda: self.serve_sse_spots_api()) - bottle.get("/api/v1/alerts/stream")(lambda: self.serve_sse_alerts_api()) - bottle.get("/api/v1/options")(lambda: self.serve_api(self.get_options())) - bottle.get("/api/v1/status")(lambda: self.serve_api(self.status_data)) - bottle.get("/api/v1/lookup/call")(lambda: self.serve_call_lookup_api()) - bottle.get("/api/v1/lookup/sigref")(lambda: self.serve_sig_ref_lookup_api()) - bottle.post("/api/v1/spot")(lambda: self.accept_spot()) - # Routes for templated pages - bottle.get("/")(lambda: self.serve_template('webpage_spots')) - bottle.get("/map")(lambda: self.serve_template('webpage_map')) - bottle.get("/bands")(lambda: self.serve_template('webpage_bands')) - bottle.get("/alerts")(lambda: self.serve_template('webpage_alerts')) - bottle.get("/add-spot")(lambda: self.serve_template('webpage_add_spot')) - bottle.get("/status")(lambda: self.serve_template('webpage_status')) - bottle.get("/about")(lambda: self.serve_template('webpage_about')) - bottle.get("/apidocs")(lambda: self.serve_template('webpage_apidocs')) - # Route for Prometheus metrics - bottle.get("/metrics")(lambda: self.serve_prometheus_metrics()) - # Default route to serve from "webassets" - bottle.get("/")(self.serve_static_file) + self.shutdown_event = asyncio.Event() # Start the web server def start(self): - self.thread.start() + asyncio.run(self.start_inner()) - # Run the web server itself. This blocks until the server is shut down, so it runs in a separate thread. - def run(self): - logging.info("Starting web server on port " + str(self.port) + "...") - self.status = "Waiting" - run(host='localhost', port=self.port, server="gevent") + # Stop the web server + def stop(self): + self.shutdown_event.set() - # Serve the JSON API /spots endpoint - def serve_spots_api(self): - try: - data = self.get_spot_list_with_filters() - return self.serve_api(data) - except ValueError as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 400 - return json.dumps("Bad request - " + str(e), default=serialize_everything) - except Exception as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 500 - return json.dumps("Error - " + str(e), default=serialize_everything) + # Start method (async). Sets up the Tornado application. 
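Before start_inner, a note on the access-counter TODO above (an editor's sketch, not part of this patch): hand the WebServer instance to each handler through initialize() and do the bookkeeping in prepare(), which Tornado runs before every get()/post(). The "server" argument name is an assumption.

    import pytz
    from datetime import datetime

    import tornado.web

    from core.prometheus_metrics_handler import api_requests_counter

    class APIBaseHandler(tornado.web.RequestHandler):
        # Tornado passes the dict given in the route spec to initialize()
        def initialize(self, server):
            self.server = server

        # prepare() runs before the verb method, so every request is counted
        def prepare(self):
            self.server.last_api_access_time = datetime.now(pytz.UTC)
            self.server.api_access_counter += 1
            api_requests_counter.inc()
            self.server.status = "OK"

Each API handler would then subclass APIBaseHandler, and its route spec would gain {"server": self}; a PageBaseHandler using page_requests_counter covers the templated pages the same way. With that in place, start_inner below only wires up the routes.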
+ async def start_inner(self): + app = tornado.web.Application([ + # Routes for API calls + (r"/api/v1/spots", APISpotsHandler), + (r"/api/v1/alerts", APIAlertsHandler), + (r"/api/v1/spots/stream", APISpotsStreamHandler), + (r"/api/v1/alerts/stream", APIAlertsStreamHandler), + (r"/api/v1/options", APIOptionsHandler, {"status_data": self.status_data}), + (r"/api/v1/status", APIStatusHandler, {"status_data": self.status_data}), + (r"/api/v1/lookup/call", APILookupCallHandler), + (r"/api/v1/lookup/sigref", APILookupSIGRefHandler), + (r"/api/v1/spot", APISpotHandler), + # Routes for templated pages + (r"/", PageTemplateHandler, {"template_name": "spots"}), + (r"/map", PageTemplateHandler, {"template_name": "map"}), + (r"/bands", PageTemplateHandler, {"template_name": "bands"}), + (r"/alerts", PageTemplateHandler, {"template_name": "alerts"}), + (r"/add-spot", PageTemplateHandler, {"template_name": "add_spot"}), + (r"/status", PageTemplateHandler, {"template_name": "status"}), + (r"/about", PageTemplateHandler, {"template_name": "about"}), + (r"/apidocs", PageTemplateHandler, {"template_name": "apidocs"}), + # Route for Prometheus metrics + (r"/metrics", PrometheusMetricsHandler), + # Default route to serve from "webassets" + (r"/(.*)", StaticFileHandler, {"path": os.path.join(os.path.dirname(__file__), "../webassets")}), + ], + template_path=os.path.join(os.path.dirname(__file__), "../templates"), + debug=True) # todo set false + app.listen(self.port) + await self.shutdown_event.wait() - # Serve the JSON API /alerts endpoint - def serve_alerts_api(self): - try: - data = self.get_alert_list_with_filters() - return self.serve_api(data) - except ValueError as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 400 - return json.dumps("Bad request - " + str(e), default=serialize_everything) - except Exception as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 500 - return json.dumps("Error - " + str(e), default=serialize_everything) - - # Serve the SSE JSON API /spots/stream endpoint - def serve_sse_spots_api(self): - try: - response.content_type = 'text/event-stream' - response.cache_control = 'no-cache' - yield 'retry: 1000\n\n' - - spot_queue = Queue(maxsize=100) - self.sse_spot_queues.append(spot_queue) - while True: - if spot_queue.empty(): - gevent.sleep(1) - else: - spot = spot_queue.get() - yield 'data: ' + json.dumps(spot, default=serialize_everything) + '\n\n' - except Exception as e: - logging.warn("Exception when serving SSE socket", e) + # Clean up any SSE queues that are growing too large; probably their client disconnected. 
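The method below is still a stub. If each connected SSE client registers one bounded asyncio.Queue in these lists (mirroring the old gevent code's queue.Queue(maxsize=100)), the old cleanup translates almost directly — a sketch under that assumption:

    # Inside clean_up_sse_queues: a queue that has hit its bound means its
    # client stopped reading, so drop it and let the stale references go.
    self.sse_spot_queues = [q for q in self.sse_spot_queues if not q.full()]
    self.sse_alert_queues = [q for q in self.sse_alert_queues if not q.full()]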
+ def clean_up_sse_queues(self): + # todo + pass - # Serve the SSE JSON API /alerts/stream endpoint - def serve_sse_alerts_api(self): - try: - response.content_type = 'text/event-stream' - response.cache_control = 'no-cache' - yield 'retry: 1000\n\n' +# API request handler for /api/v1/spots +class APISpotsHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") - alert_queue = Queue(maxsize=100) - self.sse_alert_queues.append(alert_queue) - while True: - if alert_queue.empty(): - gevent.sleep(1) - else: - alert = alert_queue.get() - yield 'data: ' + json.dumps(alert, default=serialize_everything) + '\n\n' - except Exception as e: - logging.warn("Exception when serving SSE socket", e) +# API request handler for /api/v1/alerts +class APIAlertsHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") - # Look up data for a callsign - def serve_call_lookup_api(self): - try: - # Reject if no callsign - query = bottle.request.query - if not "call" in query.keys(): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - call must be provided", default=serialize_everything) - call = query.get("call").upper() +# API request handler for /api/v1/spots/stream +class APISpotsStreamHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") - # Reject badly formatted callsigns - if not re.match(r"^[A-Za-z0-9/\-]*$", call): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + call + "' does not look like a valid callsign.", - default=serialize_everything) +# API request handler for /api/v1/alerts/stream +class APIAlertsStreamHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") - # Take the callsign, make a "fake spot" so we can run infer_missing() on it, then repack the resulting data - # in the correct way for the API response. 
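The Bottle implementation being deleted below remains the reference for filling in the APILookupCallHandler stub above. In Tornado, get_query_argument() replaces bottle.request.query; a sketch of the validation half (error handling condensed, the Spot-based lookup itself unchanged):

    import re

    import tornado.web

    class APILookupCallHandler(tornado.web.RequestHandler):
        def get(self):
            # Missing or malformed "call" is rejected with 422, as before
            call = self.get_query_argument("call", default=None)
            if call is None:
                self.send_error(422)
                return
            call = call.upper()
            if not re.match(r"^[A-Za-z0-9/\-]*$", call):
                self.send_error(422)
                return
            # ...build the fake Spot, run infer_missing(), write the JSON...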
- fake_spot = Spot(dx_call=call) - fake_spot.infer_missing() - return self.serve_api({ - "call": call, - "name": fake_spot.dx_name, - "qth": fake_spot.dx_qth, - "country": fake_spot.dx_country, - "flag": fake_spot.dx_flag, - "continent": fake_spot.dx_continent, - "dxcc_id": fake_spot.dx_dxcc_id, - "cq_zone": fake_spot.dx_cq_zone, - "itu_zone": fake_spot.dx_itu_zone, - "grid": fake_spot.dx_grid, - "latitude": fake_spot.dx_latitude, - "longitude": fake_spot.dx_longitude, - "location_source": fake_spot.dx_location_source - }) +# API request handler for /api/v1/options +class APIOptionsHandler(tornado.web.RequestHandler): + def initialize(self, status_data): + self.status_data = status_data - except Exception as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 500 - return json.dumps("Error - " + str(e), default=serialize_everything) - - # Look up data for a SIG reference - def serve_sig_ref_lookup_api(self): - try: - # Reject if no sig or sig_ref - query = bottle.request.query - if not "sig" in query.keys() or not "id" in query.keys(): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - sig and id must be provided", default=serialize_everything) - sig = query.get("sig").upper() - id = query.get("id").upper() - - # Reject if sig unknown - if not sig in list(map(lambda p: p.name, SIGS)): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - sig '" + sig + "' is not known.", default=serialize_everything) - - # Reject if sig_ref format incorrect for sig - if get_ref_regex_for_sig(sig) and not re.match(get_ref_regex_for_sig(sig), id): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + id + "' does not look like a valid reference ID for " + sig + ".", default=serialize_everything) - - data = populate_sig_ref_info(SIGRef(id=id, sig=sig)) - return self.serve_api(data) - - except Exception as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 500 - return json.dumps("Error - " + str(e), default=serialize_everything) - - # Serve a JSON API endpoint - def serve_api(self, data): - self.last_api_access_time = datetime.now(pytz.UTC) - self.api_access_counter += 1 - api_requests_counter.inc() - self.status = "OK" - response.content_type = 'application/json' - response.set_header('Cache-Control', 'no-store') - return json.dumps(data, default=serialize_everything) - - # Accept a spot - def accept_spot(self): - self.last_api_access_time = datetime.now(pytz.UTC) - self.api_access_counter += 1 - api_requests_counter.inc() - self.status = "OK" - - try: - # Reject if not allowed - if not ALLOW_SPOTTING: - response.content_type = 'application/json' - response.status = 401 - return json.dumps("Error - this server does not allow new spots to be added via the API.", - default=serialize_everything) - - # Reject if format not json - if not request.get_header('Content-Type') or request.get_header('Content-Type') != "application/json": - response.content_type = 'application/json' - response.status = 415 - return json.dumps("Error - request Content-Type must be application/json", default=serialize_everything) - - # Reject if request body is empty - post_data = request.body.read() - if not post_data: - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - request body is empty", default=serialize_everything) - - # Read in the request body as JSON then convert to a Spot 
object - json_spot = json.loads(post_data) - spot = Spot(**json_spot) - - # Converting to a spot object this way won't have coped with sig_ref objects, so fix that. (Would be nice to - # redo this in a functional style) - if spot.sig_refs: - real_sig_refs = [] - for dict_obj in spot.sig_refs: - real_sig_refs.append(json.loads(json.dumps(dict_obj), object_hook=lambda d: SIGRef(**d))) - spot.sig_refs = real_sig_refs - - # Reject if no timestamp, frequency, dx_call or de_call - if not spot.time or not spot.dx_call or not spot.freq or not spot.de_call: - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - 'time', 'dx_call', 'freq' and 'de_call' must be provided as a minimum.", - default=serialize_everything) - - # Reject invalid-looking callsigns - if not re.match(r"^[A-Za-z0-9/\-]*$", spot.dx_call): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + spot.dx_call + "' does not look like a valid callsign.", - default=serialize_everything) - if not re.match(r"^[A-Za-z0-9/\-]*$", spot.de_call): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + spot.de_call + "' does not look like a valid callsign.", - default=serialize_everything) - - # Reject if frequency not in a known band - if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND: - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.", default=serialize_everything) - - # Reject if grid formatting incorrect - if spot.dx_grid and not re.match(r"^([A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}|[A-R]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2})$", spot.dx_grid.upper()): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + spot.dx_grid + "' does not look like a valid Maidenhead grid.", default=serialize_everything) - - # Reject if sig_ref format incorrect for sig - if spot.sig and spot.sig_refs and len(spot.sig_refs) > 0 and spot.sig_refs[0].id and get_ref_regex_for_sig(spot.sig) and not re.match(get_ref_regex_for_sig(spot.sig), spot.sig_refs[0].id): - response.content_type = 'application/json' - response.status = 422 - return json.dumps("Error - '" + spot.sig_refs[0].id + "' does not look like a valid reference for " + spot.sig + ".", default=serialize_everything) - - # infer missing data, and add it to our database. - spot.source = "API" - spot.infer_missing() - self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) - - response.content_type = 'application/json' - response.set_header('Cache-Control', 'no-store') - response.status = 201 - return json.dumps("OK", default=serialize_everything) - except Exception as e: - logging.error(e) - response.content_type = 'application/json' - response.status = 500 - return json.dumps("Error - " + str(e), default=serialize_everything) - - # Serve a templated page - def serve_template(self, template_name): - self.last_page_access_time = datetime.now(pytz.UTC) - self.page_access_counter += 1 - page_requests_counter.inc() - self.status = "OK" - return template(template_name) - - # Serve general static files from "webassets" directory. 
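The static-file plumbing removed below goes away because the (r"/(.*)", StaticFileHandler, ...) entry registered in start_inner serves the whole webassets tree. Should cache headers ever be wanted on those responses, subclassing is the Tornado idiom — a sketch, not part of this patch:

    from tornado.web import StaticFileHandler

    class CachedStaticFileHandler(StaticFileHandler):
        # set_extra_headers is StaticFileHandler's hook for per-response headers
        def set_extra_headers(self, path):
            self.set_header("Cache-Control", "max-age=3600")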
- def serve_static_file(self, filepath): - return bottle.static_file(filepath, root="webassets") - - # Serve Prometheus metrics - def serve_prometheus_metrics(self): - return get_metrics() - - # Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in - # the main "spots" GET call. - def get_spot_list_with_filters(self): - # Get the query (and the right one, with Bottle magic. This is a MultiDict object) - query = bottle.request.query - - # Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots - # that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned. - # The list of query string filters is defined in the API docs. - spot_ids = list(self.spots.iterkeys()) - spots = [] - for k in spot_ids: - s = self.spots.get(k) - if s is not None: - spots.append(s) - spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0), reverse=True) - spots = list(filter(lambda spot: spot_allowed_by_query(spot, query), spots)) - if "limit" in query.keys(): - spots = spots[:int(query.get("limit"))] - - # Ensure only the latest spot of each callsign-SSID combo is present in the list. This relies on the - # list being in reverse time order, so if any future change allows re-ordering the list, that should - # be done *after* this. SSIDs are deliberately included here (see issue #68) because e.g. M0TRT-7 - # and M0TRT-9 APRS transponders could well be in different locations, on different frequencies etc. - # This is a special consideration for the geo map and band map views (and Field Spotter) because while - # duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't - # work well for the other views. - if "dedupe" in query.keys(): - dedupe = query.get("dedupe").upper() == "TRUE" - if dedupe: - spots_temp = [] - already_seen = [] - for s in spots: - call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "") - if call_plus_ssid not in already_seen: - spots_temp.append(s) - already_seen.append(call_plus_ssid) - spots = spots_temp - - return spots - - # Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in - # the main "alerts" GET call. - def get_alert_list_with_filters(self): - # Get the query (and the right one, with Bottle magic. This is a MultiDict object) - query = bottle.request.query - - # Create a shallow copy of the alert list ordered by start time, then filter the list to reduce it only to alerts - # that match the filter parameters in the query string. Finally, apply a limit to the number of alerts returned. - # The list of query string filters is defined in the API docs. - alert_ids = list(self.alerts.iterkeys()) - alerts = [] - for k in alert_ids: - a = self.alerts.get(k) - if a is not None: - alerts.append(a) - alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0)) - alerts = list(filter(lambda alert: alert_allowed_by_query(alert, query), alerts)) - if "limit" in query.keys(): - alerts = alerts[:int(query.get("limit"))] - return alerts - - # Return all the "options" for various things that the server is aware of. This can be fetched with an API call. - # The idea is that this will include most of the things that can be provided as queries to the main spots call, - # and thus a client can use this data to configure its filter controls. 
-    def get_options(self):
+    def get(self):
         options = {"bands": BANDS,
-                   "modes": ALL_MODES,
-                   "mode_types": MODE_TYPES,
-                   "sigs": SIGS,
-                   # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
-                   "spot_sources": list(
-                       map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))),
-                   "alert_sources": list(
-                       map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))),
-                   "continents": CONTINENTS,
-                   "max_spot_age": MAX_SPOT_AGE,
-                   "spot_allowed": ALLOW_SPOTTING,
-                   "web-ui-options": WEB_UI_OPTIONS}
+            "modes": ALL_MODES,
+            "mode_types": MODE_TYPES,
+            "sigs": SIGS,
+            # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available.
+            "spot_sources": list(
+                map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))),
+            "alert_sources": list(
+                map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))),
+            "continents": CONTINENTS,
+            "max_spot_age": MAX_SPOT_AGE,
+            "spot_allowed": ALLOW_SPOTTING,
+            "web-ui-options": WEB_UI_OPTIONS}
         # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from
         # one of our providers.
         if ALLOW_SPOTTING:
             options["spot_sources"].append("API")
-        return options
+        self.write(json.dumps(options, default=serialize_everything))
+        self.set_header("Cache-Control", "no-store")
+        self.set_header("Content-Type", "application/json")

-    # Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are
-    # awaiting a server-sent message with new spots.
-    def notify_new_spot(self, spot):
-        for queue in self.sse_spot_queues:
-            try:
-                queue.put(spot)
-            except:
-                # Cleanup thread was probably deleting the queue, that's fine
-                pass

+# API request handler for /api/v1/status
+class APIStatusHandler(tornado.web.RequestHandler):
+    def initialize(self, status_data):
+        self.status_data = status_data

-    # Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are
-    # awaiting a server-sent message with new spots.
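notify_new_spot is deleted here, and notify_new_alert below, with no Tornado-side replacement yet. One porting wrinkle worth noting: the spot providers run in ordinary threads, while any asyncio.Queue feeding the SSE handlers belongs to the event loop, so the hand-off should go through call_soon_threadsafe. A sketch, assuming start_inner stashes the running loop as self.loop = asyncio.get_running_loop():

    # Safe to call from a provider thread
    def notify_new_spot(self, spot):
        for queue in self.sse_spot_queues:
            self.loop.call_soon_threadsafe(self._offer, queue, spot)

    # Runs on the event loop; full queues are left for clean_up_sse_queues to reap
    def _offer(self, queue, item):
        try:
            queue.put_nowait(item)
        except asyncio.QueueFull:
            pass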
- def notify_new_alert(self, alert): - for queue in self.sse_alert_queues: - try: - queue.put(alert) - except: - # Cleanup thread was probably deleting the queue, that's fine - pass + def get(self): + self.write(json.dumps(self.status_data, default=serialize_everything)) + self.set_header("Cache-Control", "no-store") + self.set_header("Content-Type", "application/json") + +# API request handler for /api/v1/lookup/call +class APILookupCallHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") + +# API request handler for /api/v1/lookup/sigref +class APILookupSIGRefHandler(tornado.web.RequestHandler): + def get(self): + # todo + self.write("Hello, world") + +# API request handler for /api/v1/spot (POST) +class APISpotHandler(tornado.web.RequestHandler): + def post(self): + # todo + self.write("Hello, world") + + +# Handler for all HTML pages generated from templates +class PageTemplateHandler(tornado.web.RequestHandler): + def initialize(self, template_name): + self.template_name = template_name + + def get(self): + # Load named template, and provide variables used in templates + self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING) + +# Handler for Prometheus metrics endpoint +class PrometheusMetricsHandler(tornado.web.RequestHandler): + def get(self): + self.write(get_metrics()) + self.set_status(200) + self.set_header('Content-Type', CONTENT_TYPE_LATEST) - # Clean up any SSE queues that are growing too large; probably their client disconnected. - def clean_up_sse_queues(self): - self.sse_spot_queues = [q for q in self.sse_spot_queues if not q.full()] - self.sse_alert_queues = [q for q in self.sse_alert_queues if not q.full()] # Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list @@ -534,6 +271,7 @@ def spot_allowed_by_query(spot, query): return False return True + # Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list # of query parameters and their function is defined in the API docs. def alert_allowed_by_query(alert, query): @@ -576,8 +314,9 @@ def alert_allowed_by_query(alert, query): return False return True + # Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things. # Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need # to receive spots without complex handling. 
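This helper survives the port unchanged, and usefully so: Tornado's write() will auto-encode a plain dict, but spots and alerts are objects, so the handlers keep serialising explicitly, e.g.:

    # Inside any handler; serialize_everything turns Spot/Alert objects into dicts
    self.write(json.dumps(data, default=serialize_everything))
    self.set_header("Content-Type", "application/json")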
def serialize_everything(obj): - return obj.__dict__ \ No newline at end of file + return obj.__dict__ diff --git a/server/webserver_old.py b/server/webserver_old.py new file mode 100644 index 0000000..f6b0906 --- /dev/null +++ b/server/webserver_old.py @@ -0,0 +1,583 @@ +import json +import logging +import re +from datetime import datetime, timedelta +from queue import Queue +from threading import Thread + +import bottle +import gevent +import pytz +from bottle import run, request, response, template + +from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS +from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS, SOFTWARE_VERSION, UNKNOWN_BAND +from core.lookup_helper import lookup_helper +from core.prometheus_metrics_handler import page_requests_counter, get_metrics, api_requests_counter +from core.sig_utils import get_ref_regex_for_sig, populate_sig_ref_info +from data.sig_ref import SIGRef +from data.spot import Spot + + +# Provides the public-facing web server. +class WebServer: + + # Constructor + def __init__(self, spots, alerts, status_data, port): + self.last_page_access_time = None + self.last_api_access_time = None + self.page_access_counter = 0 + self.api_access_counter = 0 + self.spots = spots + self.alerts = alerts + self.sse_spot_queues = [] + self.sse_alert_queues = [] + self.status_data = status_data + self.port = port + self.thread = Thread(target=self.run) + self.thread.daemon = True + self.status = "Starting" + + # Base template data + bottle.BaseTemplate.defaults['software_version'] = SOFTWARE_VERSION + bottle.BaseTemplate.defaults['allow_spotting'] = ALLOW_SPOTTING + + # Routes for API calls + bottle.get("/api/v1/spots")(lambda: self.serve_spots_api()) + bottle.get("/api/v1/alerts")(lambda: self.serve_alerts_api()) + bottle.get("/api/v1/spots/stream")(lambda: self.serve_sse_spots_api()) + bottle.get("/api/v1/alerts/stream")(lambda: self.serve_sse_alerts_api()) + bottle.get("/api/v1/options")(lambda: self.serve_api(self.get_options())) + bottle.get("/api/v1/status")(lambda: self.serve_api(self.status_data)) + bottle.get("/api/v1/lookup/call")(lambda: self.serve_call_lookup_api()) + bottle.get("/api/v1/lookup/sigref")(lambda: self.serve_sig_ref_lookup_api()) + bottle.post("/api/v1/spot")(lambda: self.accept_spot()) + # Routes for templated pages + bottle.get("/")(lambda: self.serve_template('webpage_spots')) + bottle.get("/map")(lambda: self.serve_template('webpage_map')) + bottle.get("/bands")(lambda: self.serve_template('webpage_bands')) + bottle.get("/alerts")(lambda: self.serve_template('webpage_alerts')) + bottle.get("/add-spot")(lambda: self.serve_template('webpage_add_spot')) + bottle.get("/status")(lambda: self.serve_template('webpage_status')) + bottle.get("/about")(lambda: self.serve_template('webpage_about')) + bottle.get("/apidocs")(lambda: self.serve_template('webpage_apidocs')) + # Route for Prometheus metrics + bottle.get("/metrics")(lambda: self.serve_prometheus_metrics()) + # Default route to serve from "webassets" + bottle.get("/")(self.serve_static_file) + + # Start the web server + def start(self): + self.thread.start() + + # Run the web server itself. This blocks until the server is shut down, so it runs in a separate thread. 
+ def run(self): + logging.info("Starting web server on port " + str(self.port) + "...") + self.status = "Waiting" + run(host='localhost', port=self.port, server="gevent") + + # Serve the JSON API /spots endpoint + def serve_spots_api(self): + try: + data = self.get_spot_list_with_filters() + return self.serve_api(data) + except ValueError as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 400 + return json.dumps("Bad request - " + str(e), default=serialize_everything) + except Exception as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 500 + return json.dumps("Error - " + str(e), default=serialize_everything) + + # Serve the JSON API /alerts endpoint + def serve_alerts_api(self): + try: + data = self.get_alert_list_with_filters() + return self.serve_api(data) + except ValueError as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 400 + return json.dumps("Bad request - " + str(e), default=serialize_everything) + except Exception as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 500 + return json.dumps("Error - " + str(e), default=serialize_everything) + + # Serve the SSE JSON API /spots/stream endpoint + def serve_sse_spots_api(self): + try: + response.content_type = 'text/event-stream' + response.cache_control = 'no-cache' + yield 'retry: 1000\n\n' + + spot_queue = Queue(maxsize=100) + self.sse_spot_queues.append(spot_queue) + while True: + if spot_queue.empty(): + gevent.sleep(1) + else: + spot = spot_queue.get() + yield 'data: ' + json.dumps(spot, default=serialize_everything) + '\n\n' + except Exception as e: + logging.warn("Exception when serving SSE socket", e) + + + # Serve the SSE JSON API /alerts/stream endpoint + def serve_sse_alerts_api(self): + try: + response.content_type = 'text/event-stream' + response.cache_control = 'no-cache' + yield 'retry: 1000\n\n' + + alert_queue = Queue(maxsize=100) + self.sse_alert_queues.append(alert_queue) + while True: + if alert_queue.empty(): + gevent.sleep(1) + else: + alert = alert_queue.get() + yield 'data: ' + json.dumps(alert, default=serialize_everything) + '\n\n' + except Exception as e: + logging.warn("Exception when serving SSE socket", e) + + # Look up data for a callsign + def serve_call_lookup_api(self): + try: + # Reject if no callsign + query = bottle.request.query + if not "call" in query.keys(): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - call must be provided", default=serialize_everything) + call = query.get("call").upper() + + # Reject badly formatted callsigns + if not re.match(r"^[A-Za-z0-9/\-]*$", call): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + call + "' does not look like a valid callsign.", + default=serialize_everything) + + # Take the callsign, make a "fake spot" so we can run infer_missing() on it, then repack the resulting data + # in the correct way for the API response. 
+ fake_spot = Spot(dx_call=call) + fake_spot.infer_missing() + return self.serve_api({ + "call": call, + "name": fake_spot.dx_name, + "qth": fake_spot.dx_qth, + "country": fake_spot.dx_country, + "flag": fake_spot.dx_flag, + "continent": fake_spot.dx_continent, + "dxcc_id": fake_spot.dx_dxcc_id, + "cq_zone": fake_spot.dx_cq_zone, + "itu_zone": fake_spot.dx_itu_zone, + "grid": fake_spot.dx_grid, + "latitude": fake_spot.dx_latitude, + "longitude": fake_spot.dx_longitude, + "location_source": fake_spot.dx_location_source + }) + + except Exception as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 500 + return json.dumps("Error - " + str(e), default=serialize_everything) + + # Look up data for a SIG reference + def serve_sig_ref_lookup_api(self): + try: + # Reject if no sig or sig_ref + query = bottle.request.query + if not "sig" in query.keys() or not "id" in query.keys(): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - sig and id must be provided", default=serialize_everything) + sig = query.get("sig").upper() + id = query.get("id").upper() + + # Reject if sig unknown + if not sig in list(map(lambda p: p.name, SIGS)): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - sig '" + sig + "' is not known.", default=serialize_everything) + + # Reject if sig_ref format incorrect for sig + if get_ref_regex_for_sig(sig) and not re.match(get_ref_regex_for_sig(sig), id): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + id + "' does not look like a valid reference ID for " + sig + ".", default=serialize_everything) + + data = populate_sig_ref_info(SIGRef(id=id, sig=sig)) + return self.serve_api(data) + + except Exception as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 500 + return json.dumps("Error - " + str(e), default=serialize_everything) + + # Serve a JSON API endpoint + def serve_api(self, data): + self.last_api_access_time = datetime.now(pytz.UTC) + self.api_access_counter += 1 + api_requests_counter.inc() + self.status = "OK" + response.content_type = 'application/json' + response.set_header('Cache-Control', 'no-store') + return json.dumps(data, default=serialize_everything) + + # Accept a spot + def accept_spot(self): + self.last_api_access_time = datetime.now(pytz.UTC) + self.api_access_counter += 1 + api_requests_counter.inc() + self.status = "OK" + + try: + # Reject if not allowed + if not ALLOW_SPOTTING: + response.content_type = 'application/json' + response.status = 401 + return json.dumps("Error - this server does not allow new spots to be added via the API.", + default=serialize_everything) + + # Reject if format not json + if not request.get_header('Content-Type') or request.get_header('Content-Type') != "application/json": + response.content_type = 'application/json' + response.status = 415 + return json.dumps("Error - request Content-Type must be application/json", default=serialize_everything) + + # Reject if request body is empty + post_data = request.body.read() + if not post_data: + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - request body is empty", default=serialize_everything) + + # Read in the request body as JSON then convert to a Spot object + json_spot = json.loads(post_data) + spot = Spot(**json_spot) + + # Converting to a spot object this way won't have coped with sig_ref objects, so fix that. 
(Would be nice to + # redo this in a functional style) + if spot.sig_refs: + real_sig_refs = [] + for dict_obj in spot.sig_refs: + real_sig_refs.append(json.loads(json.dumps(dict_obj), object_hook=lambda d: SIGRef(**d))) + spot.sig_refs = real_sig_refs + + # Reject if no timestamp, frequency, dx_call or de_call + if not spot.time or not spot.dx_call or not spot.freq or not spot.de_call: + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - 'time', 'dx_call', 'freq' and 'de_call' must be provided as a minimum.", + default=serialize_everything) + + # Reject invalid-looking callsigns + if not re.match(r"^[A-Za-z0-9/\-]*$", spot.dx_call): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + spot.dx_call + "' does not look like a valid callsign.", + default=serialize_everything) + if not re.match(r"^[A-Za-z0-9/\-]*$", spot.de_call): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + spot.de_call + "' does not look like a valid callsign.", + default=serialize_everything) + + # Reject if frequency not in a known band + if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND: + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.", default=serialize_everything) + + # Reject if grid formatting incorrect + if spot.dx_grid and not re.match(r"^([A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}|[A-R]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2})$", spot.dx_grid.upper()): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + spot.dx_grid + "' does not look like a valid Maidenhead grid.", default=serialize_everything) + + # Reject if sig_ref format incorrect for sig + if spot.sig and spot.sig_refs and len(spot.sig_refs) > 0 and spot.sig_refs[0].id and get_ref_regex_for_sig(spot.sig) and not re.match(get_ref_regex_for_sig(spot.sig), spot.sig_refs[0].id): + response.content_type = 'application/json' + response.status = 422 + return json.dumps("Error - '" + spot.sig_refs[0].id + "' does not look like a valid reference for " + spot.sig + ".", default=serialize_everything) + + # infer missing data, and add it to our database. + spot.source = "API" + spot.infer_missing() + self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE) + + response.content_type = 'application/json' + response.set_header('Cache-Control', 'no-store') + response.status = 201 + return json.dumps("OK", default=serialize_everything) + except Exception as e: + logging.error(e) + response.content_type = 'application/json' + response.status = 500 + return json.dumps("Error - " + str(e), default=serialize_everything) + + # Serve a templated page + def serve_template(self, template_name): + self.last_page_access_time = datetime.now(pytz.UTC) + self.page_access_counter += 1 + page_requests_counter.inc() + self.status = "OK" + return template(template_name) + + # Serve general static files from "webassets" directory. + def serve_static_file(self, filepath): + return bottle.static_file(filepath, root="webassets") + + # Serve Prometheus metrics + def serve_prometheus_metrics(self): + return get_metrics() + + # Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in + # the main "spots" GET call. 
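One more porting note before this helper: spot_allowed_by_query and alert_allowed_by_query (both kept in the new webserver.py) still expect Bottle's MultiDict duck type — query.keys() and query.get(). Tornado instead exposes self.request.query_arguments, a dict of lists of bytes, so a thin adapter would let the filter functions carry over untouched. A sketch, not part of this patch:

    # Adapt Tornado's {name: [b"value", ...]} to the .keys()/.get() interface
    # the filter functions were written against
    class QueryAdapter(dict):
        def get(self, key, default=None):
            values = super().get(key)
            return values[0].decode() if values else default

    # In a handler: query = QueryAdapter(self.request.query_arguments)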
+ def get_spot_list_with_filters(self): + # Get the query (and the right one, with Bottle magic. This is a MultiDict object) + query = bottle.request.query + + # Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots + # that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned. + # The list of query string filters is defined in the API docs. + spot_ids = list(self.spots.iterkeys()) + spots = [] + for k in spot_ids: + s = self.spots.get(k) + if s is not None: + spots.append(s) + spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0), reverse=True) + spots = list(filter(lambda spot: spot_allowed_by_query(spot, query), spots)) + if "limit" in query.keys(): + spots = spots[:int(query.get("limit"))] + + # Ensure only the latest spot of each callsign-SSID combo is present in the list. This relies on the + # list being in reverse time order, so if any future change allows re-ordering the list, that should + # be done *after* this. SSIDs are deliberately included here (see issue #68) because e.g. M0TRT-7 + # and M0TRT-9 APRS transponders could well be in different locations, on different frequencies etc. + # This is a special consideration for the geo map and band map views (and Field Spotter) because while + # duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't + # work well for the other views. + if "dedupe" in query.keys(): + dedupe = query.get("dedupe").upper() == "TRUE" + if dedupe: + spots_temp = [] + already_seen = [] + for s in spots: + call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "") + if call_plus_ssid not in already_seen: + spots_temp.append(s) + already_seen.append(call_plus_ssid) + spots = spots_temp + + return spots + + # Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in + # the main "alerts" GET call. + def get_alert_list_with_filters(self): + # Get the query (and the right one, with Bottle magic. This is a MultiDict object) + query = bottle.request.query + + # Create a shallow copy of the alert list ordered by start time, then filter the list to reduce it only to alerts + # that match the filter parameters in the query string. Finally, apply a limit to the number of alerts returned. + # The list of query string filters is defined in the API docs. + alert_ids = list(self.alerts.iterkeys()) + alerts = [] + for k in alert_ids: + a = self.alerts.get(k) + if a is not None: + alerts.append(a) + alerts = sorted(alerts, key=lambda alert: (alert.start_time if alert and alert.start_time else 0)) + alerts = list(filter(lambda alert: alert_allowed_by_query(alert, query), alerts)) + if "limit" in query.keys(): + alerts = alerts[:int(query.get("limit"))] + return alerts + + # Return all the "options" for various things that the server is aware of. This can be fetched with an API call. + # The idea is that this will include most of the things that can be provided as queries to the main spots call, + # and thus a client can use this data to configure its filter controls. + def get_options(self): + options = {"bands": BANDS, + "modes": ALL_MODES, + "mode_types": MODE_TYPES, + "sigs": SIGS, + # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available. 
+ "spot_sources": list( + map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), + "alert_sources": list( + map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), + "continents": CONTINENTS, + "max_spot_age": MAX_SPOT_AGE, + "spot_allowed": ALLOW_SPOTTING, + "web-ui-options": WEB_UI_OPTIONS} + # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from + # one of our proviers. + if ALLOW_SPOTTING: + options["spot_sources"].append("API") + + return options + + # Internal method called when a new spot is added to the system. This is used to ping any SSE clients that are + # awaiting a server-sent message with new spots. + def notify_new_spot(self, spot): + for queue in self.sse_spot_queues: + try: + queue.put(spot) + except: + # Cleanup thread was probably deleting the queue, that's fine + pass + + # Internal method called when a new alert is added to the system. This is used to ping any SSE clients that are + # awaiting a server-sent message with new spots. + def notify_new_alert(self, alert): + for queue in self.sse_alert_queues: + try: + queue.put(alert) + except: + # Cleanup thread was probably deleting the queue, that's fine + pass + + # Clean up any SSE queues that are growing too large; probably their client disconnected. + def clean_up_sse_queues(self): + self.sse_spot_queues = [q for q in self.sse_spot_queues if not q.full()] + self.sse_alert_queues = [q for q in self.sse_alert_queues if not q.full()] + + +# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list +# of query parameters and their function is defined in the API docs. +def spot_allowed_by_query(spot, query): + for k in query.keys(): + match k: + case "since": + since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp() + if not spot.time or spot.time <= since: + return False + case "max_age": + max_age = int(query.get(k)) + since = (datetime.now(pytz.UTC) - timedelta(seconds=max_age)).timestamp() + if not spot.time or spot.time <= since: + return False + case "received_since": + since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp() + if not spot.received_time or spot.received_time <= since: + return False + case "source": + sources = query.get(k).split(",") + if not spot.source or spot.source not in sources: + return False + case "sig": + # If a list of sigs is provided, the spot must have a sig and it must match one of them. + # The special "sig" "NO_SIG", when supplied in the list, mathches spots with no sig. + sigs = query.get(k).split(",") + include_no_sig = "NO_SIG" in sigs + if not spot.sig and not include_no_sig: + return False + if spot.sig and spot.sig not in sigs: + return False + case "needs_sig": + # If true, a sig is required, regardless of what it is, it just can't be missing. Mutually + # exclusive with supplying the special "NO_SIG" parameter to the "sig" query param. + needs_sig = query.get(k).upper() == "TRUE" + if needs_sig and not spot.sig: + return False + case "needs_sig_ref": + # If true, at least one sig ref is required, regardless of what it is, it just can't be missing. 
+ needs_sig_ref = query.get(k).upper() == "TRUE" + if needs_sig_ref and (not spot.sig_refs or len(spot.sig_refs) == 0): + return False + case "band": + bands = query.get(k).split(",") + if not spot.band or spot.band not in bands: + return False + case "mode": + modes = query.get(k).split(",") + if not spot.mode or spot.mode not in modes: + return False + case "mode_type": + mode_types = query.get(k).split(",") + if not spot.mode_type or spot.mode_type not in mode_types: + return False + case "dx_continent": + dxconts = query.get(k).split(",") + if not spot.dx_continent or spot.dx_continent not in dxconts: + return False + case "de_continent": + deconts = query.get(k).split(",") + if not spot.de_continent or spot.de_continent not in deconts: + return False + case "comment_includes": + comment_includes = query.get(k).strip() + if not spot.comment or comment_includes.upper() not in spot.comment.upper(): + return False + case "dx_call_includes": + dx_call_includes = query.get(k).strip() + if not spot.dx_call or dx_call_includes.upper() not in spot.dx_call.upper(): + return False + case "allow_qrt": + # If false, spots that are flagged as QRT are not returned. + prevent_qrt = query.get(k).upper() == "FALSE" + if prevent_qrt and spot.qrt and spot.qrt == True: + return False + case "needs_good_location": + # If true, spots require a "good" location to be returned + needs_good_location = query.get(k).upper() == "TRUE" + if needs_good_location and not spot.dx_location_good: + return False + return True + +# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list +# of query parameters and their function is defined in the API docs. +def alert_allowed_by_query(alert, query): + for k in query.keys(): + match k: + case "received_since": + since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC) + if not alert.received_time or alert.received_time <= since: + return False + case "max_duration": + max_duration = int(query.get(k)) + # Check the duration if end_time is provided. If end_time is not provided, assume the activation is + # "short", i.e. it always passes this check. If dxpeditions_skip_max_duration_check is true and + # the alert is a dxpedition, it also always passes the check. + if alert.is_dxpedition and (bool(query.get( + "dxpeditions_skip_max_duration_check")) if "dxpeditions_skip_max_duration_check" in query.keys() else False): + continue + if alert.end_time and alert.start_time and alert.end_time - alert.start_time > max_duration: + return False + case "source": + sources = query.get(k).split(",") + if not alert.source or alert.source not in sources: + return False + case "sig": + # If a list of sigs is provided, the alert must have a sig and it must match one of them. + # The special "sig" "NO_SIG", when supplied in the list, mathches alerts with no sig. + sigs = query.get(k).split(",") + include_no_sig = "NO_SIG" in sigs + if not alert.sig and not include_no_sig: + return False + if alert.sig and alert.sig not in sigs: + return False + case "dx_continent": + dxconts = query.get(k).split(",") + if not alert.dx_continent or alert.dx_continent not in dxconts: + return False + case "dx_call_includes": + dx_call_includes = query.get(k).strip() + if not alert.dx_call or dx_call_includes.upper() not in alert.dx_call.upper(): + return False + return True + +# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things. +# Just converts objects to dict. 
Try to avoid doing anything clever here when serialising spots, because we also need +# to receive spots without complex handling. +def serialize_everything(obj): + return obj.__dict__ \ No newline at end of file diff --git a/spothole.py b/spothole.py index a8a80c7..f8fba13 100644 --- a/spothole.py +++ b/spothole.py @@ -1,9 +1,4 @@ # Main script -from time import sleep - -import gevent -from gevent import monkey; monkey.patch_all() - import importlib import logging import signal @@ -33,7 +28,8 @@ run = True def shutdown(sig, frame): global run - logging.info("Stopping program, this may take a few seconds...") + logging.info("Stopping program, this may take up to 60 seconds...") + web_server.stop() for p in spot_providers: if p.enabled: p.stop() @@ -44,7 +40,6 @@ def shutdown(sig, frame): lookup_helper.stop() spots.close() alerts.close() - run = False # Utility method to get a spot provider based on the class specified in its config entry. @@ -84,7 +79,6 @@ if __name__ == '__main__': # Set up web server web_server = WebServer(spots=spots, alerts=alerts, status_data=status_data, port=WEB_SERVER_PORT) - web_server.start() # Fetch, set up and start spot providers for entry in config["spot-providers"]: @@ -114,6 +108,7 @@ if __name__ == '__main__': logging.info("Startup complete.") - while run: - gevent.sleep(1) - exit(0) + # Run the web server. This is the blocking call that keeps the application running in the main thread, so this must + # be the last thing we do. web_server.stop() triggers an await condition in the web server which finishes the main + # thread. + web_server.start() diff --git a/views/webpage_about.tpl b/templates/about.html similarity index 99% rename from views/webpage_about.tpl rename to templates/about.html index 16fe182..e7ddb71 100644 --- a/views/webpage_about.tpl +++ b/templates/about.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}

About Spothole

@@ -62,5 +63,7 @@

This software is dedicated to the memory of Tom G1PJB, SK, a friend and colleague who sadly passed away around the time I started writing it in Autumn 2025. I was looking forward to showing it to you when it was done.

- - \ No newline at end of file + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_add_spot.tpl b/templates/add_spot.html similarity index 96% rename from views/webpage_add_spot.tpl rename to templates/add_spot.html index a92c076..4b67e16 100644 --- a/views/webpage_add_spot.tpl +++ b/templates/add_spot.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
- - - \ No newline at end of file + + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_alerts.tpl b/templates/alerts.html similarity index 98% rename from views/webpage_alerts.tpl rename to templates/alerts.html index 82cb5f4..c404b99 100644 --- a/views/webpage_alerts.tpl +++ b/templates/alerts.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
@@ -167,6 +168,8 @@
- - - \ No newline at end of file + + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_apidocs.tpl b/templates/apidocs.html similarity index 82% rename from views/webpage_apidocs.tpl rename to templates/apidocs.html index f65d976..854e02a 100644 --- a/views/webpage_apidocs.tpl +++ b/templates/apidocs.html @@ -1,5 +1,8 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %} - \ No newline at end of file + + +{% end %} \ No newline at end of file diff --git a/views/webpage_bands.tpl b/templates/bands.html similarity index 96% rename from views/webpage_bands.tpl rename to templates/bands.html index addda42..80a967b 100644 --- a/views/webpage_bands.tpl +++ b/templates/bands.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
@@ -128,7 +129,9 @@
- - - - \ No newline at end of file + + + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_base.tpl b/templates/base.html similarity index 97% rename from views/webpage_base.tpl rename to templates/base.html index 9e90875..af9c42b 100644 --- a/views/webpage_base.tpl +++ b/templates/base.html @@ -35,7 +35,7 @@ - + @@ -62,9 +62,9 @@ - % if allow_spotting: + {% if allow_spotting %} - % end + {% end %} @@ -75,7 +75,7 @@
-{{!base}} +{% block content %}{% end %}
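That hunk shows the whole template conversion pattern in miniature, repeated for every page below: Bottle's % rebase() plus a {{!base}} placeholder becomes Tornado's {% extends %}/{% block %} inheritance, with {% end %} closing both blocks and the {% if %} in the nav. Side by side:

    <!-- old Bottle child template (views/webpage_x.tpl) -->
    % rebase('webpage_base.tpl')
    <p>page content</p>

    <!-- new Tornado child template (templates/x.html) -->
    {% extends "base.html" %}
    {% block content %}
    <p>page content</p>
    {% end %}

Variables such as software_version and allow_spotting, previously injected globally via bottle.BaseTemplate.defaults, now arrive per render from PageTemplateHandler's self.render() call.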
diff --git a/views/webpage_map.tpl b/templates/map.html similarity index 97% rename from views/webpage_map.tpl rename to templates/map.html index 7bc7994..1f1c1e8 100644 --- a/views/webpage_map.tpl +++ b/templates/map.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
@@ -146,7 +147,9 @@ - - - - \ No newline at end of file + + + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_spots.tpl b/templates/spots.html similarity index 98% rename from views/webpage_spots.tpl rename to templates/spots.html index 153d095..abecef0 100644 --- a/views/webpage_spots.tpl +++ b/templates/spots.html @@ -1,4 +1,5 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
- - - - \ No newline at end of file + + + + + +{% end %} \ No newline at end of file diff --git a/views/webpage_status.tpl b/templates/status.html similarity index 59% rename from views/webpage_status.tpl rename to templates/status.html index 9699c15..12a0b99 100644 --- a/views/webpage_status.tpl +++ b/templates/status.html @@ -1,7 +1,10 @@ -% rebase('webpage_base.tpl') +{% extends "base.html" %} +{% block content %}
- - - \ No newline at end of file + + + + +{% end %} \ No newline at end of file diff --git a/webassets/img/favicon.ico b/webassets/favicon.ico similarity index 100% rename from webassets/img/favicon.ico rename to webassets/favicon.ico
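Finally, the favicon moves from webassets/img/ to the webassets root, presumably so that browsers' default request for /favicon.ico is answered by the catch-all StaticFileHandler route without a dedicated handler. Had the file stayed put, a hypothetical explicit route could have done the same job:

    # Hypothetical alternative: keep the old location and map the default URL onto it
    (r"/(favicon\.ico)", StaticFileHandler, {"path": "webassets/img"}),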