From 61784e8af69920e153bfe16a0761e56ae6495f25 Mon Sep 17 00:00:00 2001
From: Ian Renton
Date: Tue, 23 Dec 2025 11:51:00 +0000
Subject: [PATCH] Split up some code for sanity #3

---
 core/utils.py                   |   5 +
 server/handlers/api/addspot.py  |   7 +
 server/handlers/api/alerts.py   |  60 ++++++++
 server/handlers/api/lookups.py  |  13 ++
 server/handlers/api/options.py  |  37 +++++
 server/handlers/api/spots.py    | 163 ++++++++++++++++++++
 server/handlers/api/status.py   |  17 +++
 server/handlers/metrics.py      |  12 ++
 server/handlers/pagetemplate.py |  14 ++
 server/webserver.py             | 253 ++------------------------------
 spothole.py                     |   4 +-
 webassets/js/spots.js           |   1 +
 12 files changed, 344 insertions(+), 242 deletions(-)
 create mode 100644 core/utils.py
 create mode 100644 server/handlers/api/addspot.py
 create mode 100644 server/handlers/api/alerts.py
 create mode 100644 server/handlers/api/lookups.py
 create mode 100644 server/handlers/api/options.py
 create mode 100644 server/handlers/api/spots.py
 create mode 100644 server/handlers/api/status.py
 create mode 100644 server/handlers/metrics.py
 create mode 100644 server/handlers/pagetemplate.py

diff --git a/core/utils.py b/core/utils.py
new file mode 100644
index 0000000..39e6d1a
--- /dev/null
+++ b/core/utils.py
@@ -0,0 +1,5 @@
+# Convert objects to serialisable things. Used by the JSON serialiser as a default when it encounters unserialisable things.
+# Just converts objects to dict. Try to avoid doing anything clever here when serialising spots, because we also need
+# to receive spots without complex handling.
+def serialize_everything(obj):
+    return obj.__dict__
diff --git a/server/handlers/api/addspot.py b/server/handlers/api/addspot.py
new file mode 100644
index 0000000..609db16
--- /dev/null
+++ b/server/handlers/api/addspot.py
@@ -0,0 +1,7 @@
+import tornado
+
+# API request handler for /api/v1/spot (POST)
+class APISpotHandler(tornado.web.RequestHandler):
+    def post(self):
+        # todo
+        self.write("Hello, world")
diff --git a/server/handlers/api/alerts.py b/server/handlers/api/alerts.py
new file mode 100644
index 0000000..c10afeb
--- /dev/null
+++ b/server/handlers/api/alerts.py
@@ -0,0 +1,60 @@
+from datetime import datetime
+
+import pytz
+import tornado
+
+# API request handler for /api/v1/alerts
+class APIAlertsHandler(tornado.web.RequestHandler):
+    def get(self):
+        # todo
+        self.write("Hello, world")
+
+# API request handler for /api/v1/alerts/stream
+class APIAlertsStreamHandler(tornado.web.RequestHandler):
+    def get(self):
+        # todo
+        self.write("Hello, world")
+
+
+
+# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list
+# of query parameters and their function is defined in the API docs.
+def alert_allowed_by_query(alert, query):
+    for k in query.keys():
+        match k:
+            case "received_since":
+                since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC)
+                if not alert.received_time or alert.received_time <= since:
+                    return False
+            case "max_duration":
+                max_duration = int(query.get(k))
+                # Check the duration if end_time is provided. If end_time is not provided, assume the activation is
+                # "short", i.e. it always passes this check. If dxpeditions_skip_max_duration_check is true and
+                # the alert is a dxpedition, it also always passes the check.
+                if alert.is_dxpedition and (bool(query.get(
+                        "dxpeditions_skip_max_duration_check")) if "dxpeditions_skip_max_duration_check" in query.keys() else False):
+                    continue
+                if alert.end_time and alert.start_time and alert.end_time - alert.start_time > max_duration:
+                    return False
+            case "source":
+                sources = query.get(k).split(",")
+                if not alert.source or alert.source not in sources:
+                    return False
+            case "sig":
+                # If a list of sigs is provided, the alert must have a sig and it must match one of them.
+                # The special "sig" "NO_SIG", when supplied in the list, matches alerts with no sig.
+                sigs = query.get(k).split(",")
+                include_no_sig = "NO_SIG" in sigs
+                if not alert.sig and not include_no_sig:
+                    return False
+                if alert.sig and alert.sig not in sigs:
+                    return False
+            case "dx_continent":
+                dxconts = query.get(k).split(",")
+                if not alert.dx_continent or alert.dx_continent not in dxconts:
+                    return False
+            case "dx_call_includes":
+                dx_call_includes = query.get(k).strip()
+                if not alert.dx_call or dx_call_includes.upper() not in alert.dx_call.upper():
+                    return False
+    return True
diff --git a/server/handlers/api/lookups.py b/server/handlers/api/lookups.py
new file mode 100644
index 0000000..a262016
--- /dev/null
+++ b/server/handlers/api/lookups.py
@@ -0,0 +1,13 @@
+import tornado
+
+# API request handler for /api/v1/lookup/call
+class APILookupCallHandler(tornado.web.RequestHandler):
+    def get(self):
+        # todo
+        self.write("Hello, world")
+
+# API request handler for /api/v1/lookup/sigref
+class APILookupSIGRefHandler(tornado.web.RequestHandler):
+    def get(self):
+        # todo
+        self.write("Hello, world")
diff --git a/server/handlers/api/options.py b/server/handlers/api/options.py
new file mode 100644
index 0000000..6aaed90
--- /dev/null
+++ b/server/handlers/api/options.py
@@ -0,0 +1,37 @@
+import json
+
+import tornado
+
+from core.config import MAX_SPOT_AGE, ALLOW_SPOTTING, WEB_UI_OPTIONS
+from core.constants import BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS
+from core.utils import serialize_everything
+
+
+# API request handler for /api/v1/options
+class APIOptionsHandler(tornado.web.RequestHandler):
+    def initialize(self, status_data):
+        self.status_data = status_data
+
+    def get(self):
+        options = {"bands": BANDS,
+                   "modes": ALL_MODES,
+                   "mode_types": MODE_TYPES,
+                   "sigs": SIGS,
+                   # Spot/alert sources are filtered to only the ones that are enabled in config; no point letting the user toggle things that aren't even available.
+                   "spot_sources": list(
+                       map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))),
+                   "alert_sources": list(
+                       map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))),
+                   "continents": CONTINENTS,
+                   "max_spot_age": MAX_SPOT_AGE,
+                   "spot_allowed": ALLOW_SPOTTING,
+                   "web-ui-options": WEB_UI_OPTIONS}
+        # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from
+        # one of our providers.
+        if ALLOW_SPOTTING:
+            options["spot_sources"].append("API")
+
+        self.write(json.dumps(options, default=serialize_everything))
+        self.set_status(200)
+        self.set_header("Cache-Control", "no-store")
+        self.set_header("Content-Type", "application/json")
diff --git a/server/handlers/api/spots.py b/server/handlers/api/spots.py
new file mode 100644
index 0000000..0f6075c
--- /dev/null
+++ b/server/handlers/api/spots.py
@@ -0,0 +1,163 @@
+import json
+import logging
+from datetime import datetime, timedelta
+
+import pytz
+import tornado
+
+from core.utils import serialize_everything
+
+
+# API request handler for /api/v1/spots
+class APISpotsHandler(tornado.web.RequestHandler):
+    def initialize(self, spots):
+        self.spots = spots
+
+    def get(self):
+        try:
+            # request.arguments maps each param key to a list of byte strings because technically the client can
+            # supply multiple values, so reduce that to just the first entry, decoded to a string
+            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
+            # Fetch all spots matching the query
+            data = get_spot_list_with_filters(self.spots, query_params)
+            self.write(json.dumps(data, default=serialize_everything))
+            self.set_status(200)
+        except ValueError as e:
+            logging.error(e)
+            self.write(json.dumps("Bad request - " + str(e), default=serialize_everything))
+            self.set_status(400)
+        except Exception as e:
+            logging.error(e)
+            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
+            self.set_status(500)
+        self.set_header("Cache-Control", "no-store")
+        self.set_header("Content-Type", "application/json")
+
+
+# API request handler for /api/v1/spots/stream
+class APISpotsStreamHandler(tornado.web.RequestHandler):
+    def get(self):
+        # todo
+        self.write("Hello, world")
+
+
+
+# Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in
+# the main "spots" GET call.
+def get_spot_list_with_filters(all_spots, query):
+    # Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots
+    # that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned.
+    # The list of query string filters is defined in the API docs.
+    spot_ids = list(all_spots.iterkeys())
+    spots = []
+    for k in spot_ids:
+        s = all_spots.get(k)
+        if s is not None:
+            spots.append(s)
+    spots = sorted(spots, key=lambda spot: (spot.time if spot and spot.time else 0), reverse=True)
+    spots = list(filter(lambda spot: spot_allowed_by_query(spot, query), spots))
+    if "limit" in query.keys():
+        spots = spots[:int(query.get("limit"))]
+
+    # Ensure only the latest spot of each callsign-SSID combo is present in the list. This relies on the
+    # list being in reverse time order, so if any future change allows re-ordering the list, that should
+    # be done *after* this. SSIDs are deliberately included here (see issue #68) because e.g. M0TRT-7
+    # and M0TRT-9 APRS transponders could well be in different locations, on different frequencies etc.
+    # This is a special consideration for the geo map and band map views (and Field Spotter) because while
+    # duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't
+    # work well for the other views.
+    if "dedupe" in query.keys():
+        dedupe = query.get("dedupe").upper() == "TRUE"
+        if dedupe:
+            spots_temp = []
+            already_seen = []
+            for s in spots:
+                call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
+                if call_plus_ssid not in already_seen:
+                    spots_temp.append(s)
+                    already_seen.append(call_plus_ssid)
+            spots = spots_temp
+
+    return spots
+
+# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
+# of query parameters and their function is defined in the API docs.
+def spot_allowed_by_query(spot, query):
+    for k in query.keys():
+        match k:
+            case "since":
+                since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp()
+                if not spot.time or spot.time <= since:
+                    return False
+            case "max_age":
+                max_age = int(query.get(k))
+                since = (datetime.now(pytz.UTC) - timedelta(seconds=max_age)).timestamp()
+                if not spot.time or spot.time <= since:
+                    return False
+            case "received_since":
+                since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp()
+                if not spot.received_time or spot.received_time <= since:
+                    return False
+            case "source":
+                sources = query.get(k).split(",")
+                if not spot.source or spot.source not in sources:
+                    return False
+            case "sig":
+                # If a list of sigs is provided, the spot must have a sig and it must match one of them.
+                # The special "sig" "NO_SIG", when supplied in the list, matches spots with no sig.
+                sigs = query.get(k).split(",")
+                include_no_sig = "NO_SIG" in sigs
+                if not spot.sig and not include_no_sig:
+                    return False
+                if spot.sig and spot.sig not in sigs:
+                    return False
+            case "needs_sig":
+                # If true, a sig is required, regardless of what it is, it just can't be missing. Mutually
+                # exclusive with supplying the special "NO_SIG" parameter to the "sig" query param.
+                needs_sig = query.get(k).upper() == "TRUE"
+                if needs_sig and not spot.sig:
+                    return False
+            case "needs_sig_ref":
+                # If true, at least one sig ref is required, regardless of what it is, it just can't be missing.
+                needs_sig_ref = query.get(k).upper() == "TRUE"
+                if needs_sig_ref and (not spot.sig_refs or len(spot.sig_refs) == 0):
+                    return False
+            case "band":
+                bands = query.get(k).split(",")
+                if not spot.band or spot.band not in bands:
+                    return False
+            case "mode":
+                modes = query.get(k).split(",")
+                if not spot.mode or spot.mode not in modes:
+                    return False
+            case "mode_type":
+                mode_types = query.get(k).split(",")
+                if not spot.mode_type or spot.mode_type not in mode_types:
+                    return False
+            case "dx_continent":
+                dxconts = query.get(k).split(",")
+                if not spot.dx_continent or spot.dx_continent not in dxconts:
+                    return False
+            case "de_continent":
+                deconts = query.get(k).split(",")
+                if not spot.de_continent or spot.de_continent not in deconts:
+                    return False
+            case "comment_includes":
+                comment_includes = query.get(k).strip()
+                if not spot.comment or comment_includes.upper() not in spot.comment.upper():
+                    return False
+            case "dx_call_includes":
+                dx_call_includes = query.get(k).strip()
+                if not spot.dx_call or dx_call_includes.upper() not in spot.dx_call.upper():
+                    return False
+            case "allow_qrt":
+                # If false, spots that are flagged as QRT are not returned.
+ prevent_qrt = query.get(k).upper() == "FALSE" + if prevent_qrt and spot.qrt and spot.qrt == True: + return False + case "needs_good_location": + # If true, spots require a "good" location to be returned + needs_good_location = query.get(k).upper() == "TRUE" + if needs_good_location and not spot.dx_location_good: + return False + return True \ No newline at end of file diff --git a/server/handlers/api/status.py b/server/handlers/api/status.py new file mode 100644 index 0000000..9a5219e --- /dev/null +++ b/server/handlers/api/status.py @@ -0,0 +1,17 @@ +import json + +import tornado + +from core.utils import serialize_everything + + +# API request handler for /api/v1/status +class APIStatusHandler(tornado.web.RequestHandler): + def initialize(self, status_data): + self.status_data = status_data + + def get(self): + self.write(json.dumps(self.status_data, default=serialize_everything)) + self.set_status(200) + self.set_header("Cache-Control", "no-store") + self.set_header("Content-Type", "application/json") diff --git a/server/handlers/metrics.py b/server/handlers/metrics.py new file mode 100644 index 0000000..8644ac3 --- /dev/null +++ b/server/handlers/metrics.py @@ -0,0 +1,12 @@ +import tornado +from prometheus_client.openmetrics.exposition import CONTENT_TYPE_LATEST + +from core.prometheus_metrics_handler import get_metrics + + +# Handler for Prometheus metrics endpoint +class PrometheusMetricsHandler(tornado.web.RequestHandler): + def get(self): + self.write(get_metrics()) + self.set_status(200) + self.set_header('Content-Type', CONTENT_TYPE_LATEST) diff --git a/server/handlers/pagetemplate.py b/server/handlers/pagetemplate.py new file mode 100644 index 0000000..984a613 --- /dev/null +++ b/server/handlers/pagetemplate.py @@ -0,0 +1,14 @@ +import tornado + +from core.config import ALLOW_SPOTTING +from core.constants import SOFTWARE_VERSION + + +# Handler for all HTML pages generated from templates +class PageTemplateHandler(tornado.web.RequestHandler): + def initialize(self, template_name): + self.template_name = template_name + + def get(self): + # Load named template, and provide variables used in templates + self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING) diff --git a/server/webserver.py b/server/webserver.py index 7300e80..ed67d9f 100644 --- a/server/webserver.py +++ b/server/webserver.py @@ -1,20 +1,23 @@ import asyncio -import json import os -from datetime import datetime, timedelta -import pytz import tornado -from prometheus_client.openmetrics.exposition import CONTENT_TYPE_LATEST from tornado.web import StaticFileHandler -from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE, WEB_UI_OPTIONS -from core.constants import SOFTWARE_VERSION, BANDS, ALL_MODES, MODE_TYPES, SIGS, CONTINENTS -from core.prometheus_metrics_handler import get_metrics, page_requests_counter +from server.handlers.api.addspot import APISpotHandler +from server.handlers.api.alerts import APIAlertsHandler, APIAlertsStreamHandler +from server.handlers.api.lookups import APILookupCallHandler, APILookupSIGRefHandler +from server.handlers.api.options import APIOptionsHandler +from server.handlers.api.spots import APISpotsHandler, APISpotsStreamHandler +from server.handlers.api.status import APIStatusHandler +from server.handlers.metrics import PrometheusMetricsHandler +from server.handlers.pagetemplate import PageTemplateHandler # Provides the public-facing web server. 
-# TODO synchronous API responses +# TODO alerts API +# TODO lookup APIs +# TODO post spot API # TODO SSE API responses # TODO clean_up_sse_queues # TODO page & API access counters - how to do from a subclass handler? e.g. @@ -56,7 +59,7 @@ class WebServer: async def start_inner(self): app = tornado.web.Application([ # Routes for API calls - (r"/api/v1/spots", APISpotsHandler), + (r"/api/v1/spots", APISpotsHandler, {"spots": self.spots}), (r"/api/v1/alerts", APIAlertsHandler), (r"/api/v1/spots/stream", APISpotsStreamHandler), (r"/api/v1/alerts/stream", APIAlertsStreamHandler), @@ -88,235 +91,3 @@ class WebServer: def clean_up_sse_queues(self): # todo pass - - -# API request handler for /api/v1/spots -class APISpotsHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/alerts -class APIAlertsHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/spots/stream -class APISpotsStreamHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/alerts/stream -class APIAlertsStreamHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/options -class APIOptionsHandler(tornado.web.RequestHandler): - def initialize(self, status_data): - self.status_data = status_data - - def get(self): - options = {"bands": BANDS, - "modes": ALL_MODES, - "mode_types": MODE_TYPES, - "sigs": SIGS, - # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available. - "spot_sources": list( - map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), - "alert_sources": list( - map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), - "continents": CONTINENTS, - "max_spot_age": MAX_SPOT_AGE, - "spot_allowed": ALLOW_SPOTTING, - "web-ui-options": WEB_UI_OPTIONS} - # If spotting to this server is enabled, "API" is another valid spot source even though it does not come from - # one of our proviers. 
- if ALLOW_SPOTTING: - options["spot_sources"].append("API") - - self.write(json.dumps(options, default=serialize_everything)) - self.set_header("Cache-Control", "no-store") - self.set_header("Content-Type", "application/json") - -# API request handler for /api/v1/status -class APIStatusHandler(tornado.web.RequestHandler): - def initialize(self, status_data): - self.status_data = status_data - - def get(self): - self.write(json.dumps(self.status_data, default=serialize_everything)) - self.set_header("Cache-Control", "no-store") - self.set_header("Content-Type", "application/json") - -# API request handler for /api/v1/lookup/call -class APILookupCallHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/lookup/sigref -class APILookupSIGRefHandler(tornado.web.RequestHandler): - def get(self): - # todo - self.write("Hello, world") - -# API request handler for /api/v1/spot (POST) -class APISpotHandler(tornado.web.RequestHandler): - def post(self): - # todo - self.write("Hello, world") - - -# Handler for all HTML pages generated from templates -class PageTemplateHandler(tornado.web.RequestHandler): - def initialize(self, template_name): - self.template_name = template_name - - def get(self): - # Load named template, and provide variables used in templates - self.render(self.template_name + ".html", software_version=SOFTWARE_VERSION, allow_spotting=ALLOW_SPOTTING) - -# Handler for Prometheus metrics endpoint -class PrometheusMetricsHandler(tornado.web.RequestHandler): - def get(self): - self.write(get_metrics()) - self.set_status(200) - self.set_header('Content-Type', CONTENT_TYPE_LATEST) - - - -# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list -# of query parameters and their function is defined in the API docs. -def spot_allowed_by_query(spot, query): - for k in query.keys(): - match k: - case "since": - since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp() - if not spot.time or spot.time <= since: - return False - case "max_age": - max_age = int(query.get(k)) - since = (datetime.now(pytz.UTC) - timedelta(seconds=max_age)).timestamp() - if not spot.time or spot.time <= since: - return False - case "received_since": - since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC).timestamp() - if not spot.received_time or spot.received_time <= since: - return False - case "source": - sources = query.get(k).split(",") - if not spot.source or spot.source not in sources: - return False - case "sig": - # If a list of sigs is provided, the spot must have a sig and it must match one of them. - # The special "sig" "NO_SIG", when supplied in the list, mathches spots with no sig. - sigs = query.get(k).split(",") - include_no_sig = "NO_SIG" in sigs - if not spot.sig and not include_no_sig: - return False - if spot.sig and spot.sig not in sigs: - return False - case "needs_sig": - # If true, a sig is required, regardless of what it is, it just can't be missing. Mutually - # exclusive with supplying the special "NO_SIG" parameter to the "sig" query param. - needs_sig = query.get(k).upper() == "TRUE" - if needs_sig and not spot.sig: - return False - case "needs_sig_ref": - # If true, at least one sig ref is required, regardless of what it is, it just can't be missing. 
- needs_sig_ref = query.get(k).upper() == "TRUE" - if needs_sig_ref and (not spot.sig_refs or len(spot.sig_refs) == 0): - return False - case "band": - bands = query.get(k).split(",") - if not spot.band or spot.band not in bands: - return False - case "mode": - modes = query.get(k).split(",") - if not spot.mode or spot.mode not in modes: - return False - case "mode_type": - mode_types = query.get(k).split(",") - if not spot.mode_type or spot.mode_type not in mode_types: - return False - case "dx_continent": - dxconts = query.get(k).split(",") - if not spot.dx_continent or spot.dx_continent not in dxconts: - return False - case "de_continent": - deconts = query.get(k).split(",") - if not spot.de_continent or spot.de_continent not in deconts: - return False - case "comment_includes": - comment_includes = query.get(k).strip() - if not spot.comment or comment_includes.upper() not in spot.comment.upper(): - return False - case "dx_call_includes": - dx_call_includes = query.get(k).strip() - if not spot.dx_call or dx_call_includes.upper() not in spot.dx_call.upper(): - return False - case "allow_qrt": - # If false, spots that are flagged as QRT are not returned. - prevent_qrt = query.get(k).upper() == "FALSE" - if prevent_qrt and spot.qrt and spot.qrt == True: - return False - case "needs_good_location": - # If true, spots require a "good" location to be returned - needs_good_location = query.get(k).upper() == "TRUE" - if needs_good_location and not spot.dx_location_good: - return False - return True - - -# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list -# of query parameters and their function is defined in the API docs. -def alert_allowed_by_query(alert, query): - for k in query.keys(): - match k: - case "received_since": - since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC) - if not alert.received_time or alert.received_time <= since: - return False - case "max_duration": - max_duration = int(query.get(k)) - # Check the duration if end_time is provided. If end_time is not provided, assume the activation is - # "short", i.e. it always passes this check. If dxpeditions_skip_max_duration_check is true and - # the alert is a dxpedition, it also always passes the check. - if alert.is_dxpedition and (bool(query.get( - "dxpeditions_skip_max_duration_check")) if "dxpeditions_skip_max_duration_check" in query.keys() else False): - continue - if alert.end_time and alert.start_time and alert.end_time - alert.start_time > max_duration: - return False - case "source": - sources = query.get(k).split(",") - if not alert.source or alert.source not in sources: - return False - case "sig": - # If a list of sigs is provided, the alert must have a sig and it must match one of them. - # The special "sig" "NO_SIG", when supplied in the list, mathches alerts with no sig. - sigs = query.get(k).split(",") - include_no_sig = "NO_SIG" in sigs - if not alert.sig and not include_no_sig: - return False - if alert.sig and alert.sig not in sigs: - return False - case "dx_continent": - dxconts = query.get(k).split(",") - if not alert.dx_continent or alert.dx_continent not in dxconts: - return False - case "dx_call_includes": - dx_call_includes = query.get(k).strip() - if not alert.dx_call or dx_call_includes.upper() not in alert.dx_call.upper(): - return False - return True - - -# Convert objects to serialisable things. Used by JSON serialiser as a default when it encounters unserializable things. -# Just converts objects to dict. 
Try to avoid doing anything clever here when serialising spots, because we also need -# to receive spots without complex handling. -def serialize_everything(obj): - return obj.__dict__ diff --git a/spothole.py b/spothole.py index f8fba13..fc6ff33 100644 --- a/spothole.py +++ b/spothole.py @@ -1,6 +1,7 @@ # Main script import importlib import logging +import os import signal import sys @@ -28,7 +29,7 @@ run = True def shutdown(sig, frame): global run - logging.info("Stopping program, this may take up to 60 seconds...") + logging.info("Stopping program...") web_server.stop() for p in spot_providers: if p.enabled: @@ -40,6 +41,7 @@ def shutdown(sig, frame): lookup_helper.stop() spots.close() alerts.close() + os._exit(0) # Utility method to get a spot provider based on the class specified in its config entry. diff --git a/webassets/js/spots.js b/webassets/js/spots.js index 048ea56..05b07c7 100644 --- a/webassets/js/spots.js +++ b/webassets/js/spots.js @@ -48,6 +48,7 @@ function restartSSEConnection() { }; evtSource.onerror = function(err) { + evtSource.close(); setTimeout(restartSSEConnection(), 1000); }; }