diff --git a/alertproviders/alert_provider.py b/alertproviders/alert_provider.py index 59e6e4d..1c6f62c 100644 --- a/alertproviders/alert_provider.py +++ b/alertproviders/alert_provider.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timedelta import pytz @@ -35,8 +35,12 @@ class AlertProvider: for alert in alerts: # Fill in any blanks alert.infer_missing() - # Add to the list - self.alerts.add(alert.id, alert, expire=MAX_ALERT_AGE) + # Add to the list, provided it meets the sensible date test: if an alert has an end time, it must be in the + # future; if not, its start time must be within the last 24 hours. + if (alert.end_time and alert.end_time > datetime.now(pytz.UTC).timestamp()) or ( + not alert.end_time and alert.start_time > (datetime.now( + pytz.UTC) - timedelta(days=1)).timestamp()): + self.alerts.add(alert.id, alert, expire=MAX_ALERT_AGE) # Stop any threads and prepare for application shutdown def stop(self): diff --git a/core/cleanup.py b/core/cleanup.py index 3c7eba2..c97a63d 100644 --- a/core/cleanup.py +++ b/core/cleanup.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timedelta from threading import Timer from time import sleep @@ -32,6 +32,17 @@ class CleanupTimer: # Perform cleanup self.spots.expire() self.alerts.expire() + + # Alerts can persist in the system for a while, so we want to explicitly clean up any alerts that have + # definitively ended, or, if they have no definite end time, whose start time was more than 24 hours + # ago. + for id in list(self.alerts.iterkeys()): + alert = self.alerts[id] + if (alert.end_time and alert.end_time < datetime.now(pytz.UTC).timestamp()) or ( + not alert.end_time and alert.start_time < (datetime.now( + pytz.UTC) - timedelta(days=1)).timestamp()): + self.alerts.evict(id) + self.status = "OK" self.last_cleanup_time = datetime.now(pytz.UTC) @@ -41,4 +52,4 @@ class CleanupTimer: sleep(1) self.cleanup_timer = Timer(self.cleanup_interval, self.cleanup) - self.cleanup_timer.start() \ No newline at end of file + self.cleanup_timer.start() diff --git a/data/alert.py b/data/alert.py index d628450..687f892 100644 --- a/data/alert.py +++ b/data/alert.py @@ -1,7 +1,9 @@ +import hashlib import json from dataclasses import dataclass from datetime import datetime +import copy import pytz from core.constants import DXCC_FLAGS @@ -14,7 +16,7 @@ from core.utils import infer_continent_from_callsign, \ @dataclass class Alert: # Unique identifier for the alert - id: int = None + id: str = None # Callsign of the operator that has been alertted dx_call: str = None # Name of the operator that has been alertted @@ -103,8 +105,15 @@ class Alert: if self.dx_call and not self.dx_name: self.dx_name = infer_name_from_callsign(self.dx_call) - # Always create an ID based on a hashcode - self.id = hash(str(self)) + # Always create an ID based on a hash of every parameter *except* received_time. This is used as the index + # to a map, which as a byproduct avoids us having multiple duplicate copies of the object that are identical + # apart from that they were retrieved from the API at different times. Note that the simple Python hash() + # function includes a seed randomly generated at runtime; this is therefore not consistent between runs. But we + # use diskcache to store our data between runs, so we use SHA256 which does not include this random element.
+ self_copy = copy.deepcopy(self) + self_copy.received_time = 0 + self_copy.received_time_iso = "" + self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest() # JSON serialise def to_json(self): diff --git a/data/spot.py b/data/spot.py index 21aa147..9e897d0 100644 --- a/data/spot.py +++ b/data/spot.py @@ -1,3 +1,5 @@ +import copy +import hashlib import json from dataclasses import dataclass from datetime import datetime @@ -16,7 +18,7 @@ from core.utils import infer_mode_type_from_mode, infer_band_from_freq, infer_co @dataclass class Spot: # Unique identifier for the spot - id: int = None + id: str = None # Callsign of the operator that has been spotted dx_call: str = None # Callsign of the operator that has spotted them @@ -207,8 +209,15 @@ class Spot: # is likely at home. self.location_good = self.location_source == "SPOT" or (self.location_source == "QRZ" and not "/" in self.dx_call) - # Always create an ID based on a hashcode - self.id = hash(str(self)) + # Always create an ID based on a hash of every parameter *except* received_time. This is used as the index + # to a map, which as a byproduct avoids us having multiple duplicate copies of the object that are identical + # apart from that they were retrieved from the API at different times. Note that the simple Python hash() + # function includes a seed randomly generated at runtime; this is therefore not consistent between runs. But we + # use diskcache to store our data between runs, so we use SHA256 which does not include this random element. + self_copy = copy.deepcopy(self) + self_copy.received_time = 0 + self_copy.received_time_iso = "" + self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest() # JSON serialise def to_json(self): diff --git a/server/webserver.py b/server/webserver.py index cfbfbdc..c5e9dc6 100644 --- a/server/webserver.py +++ b/server/webserver.py @@ -70,7 +70,8 @@ class WebServer: if not ALLOW_SPOTTING: response.content_type = 'application/json' response.status = 401 - return json.dumps("Error - this server does not allow new spots to be added via the API.", default=serialize_everything) + return json.dumps("Error - this server does not allow new spots to be added via the API.", + default=serialize_everything) # Reject if no spot if not bottle.request.query.spot: @@ -86,7 +87,8 @@ class WebServer: if not spot.time or not spot.dx_call: response.content_type = 'application/json' response.status = 422 - return json.dumps("Error - 'time' and 'dx_call' must be provided as a minimum.", default=serialize_everything) + return json.dumps("Error - 'time' and 'dx_call' must be provided as a minimum.", + default=serialize_everything) # infer missing data, and add it to our database. spot.source = "API" @@ -169,7 +171,6 @@ class WebServer: spots = spots[:int(query.get("limit"))] return spots - # Utility method to apply filters to the overall alert list and return only a subset. Enables query parameters in # the main "alerts" GET call. def get_alert_list_with_filters(self): @@ -185,26 +186,26 @@ class WebServer: alert_ids = list(self.alerts.iterkeys()) alerts = [] for k in alert_ids: - # While we persist old spots in the system for a while to produce a useful list, any alert that has already - # passed its end time can be explicitly removed from the list to return. 
- # TODO deal with there being no end time - if self.alerts.get(k).end_time > datetime.now(pytz.UTC).timestamp(): - alerts.append(self.alerts.get(k)) + alerts.append(self.alerts.get(k)) alerts = sorted(alerts, key=lambda alert: alert.start_time) for k in query.keys(): match k: case "received_since": since = datetime.fromtimestamp(int(query.get(k)), pytz.UTC) - alerts = [s for s in alerts if s.received_time > since] + alerts = [a for a in alerts if a.received_time > since] + case "max_duration": + max_duration = int(query.get(k)) + alerts = [a for a in alerts if (a.end_time and a.end_time - a.start_time <= max_duration) or ( + not a.end_time and datetime.now(pytz.UTC).timestamp() - a.start_time <= max_duration)] case "source": sources = query.get(k).split(",") - alerts = [s for s in alerts if s.source in sources] + alerts = [a for a in alerts if a.source in sources] case "sig": sigs = query.get(k).split(",") - alerts = [s for s in alerts if s.sig in sigs] + alerts = [a for a in alerts if a.sig in sigs] case "dx_continent": dxconts = query.get(k).split(",") - alerts = [s for s in alerts if s.dx_continent in dxconts] + alerts = [a for a in alerts if a.dx_continent in dxconts] # If we have a "limit" parameter, we apply that last, regardless of where it appeared in the list of keys. if "limit" in query.keys(): alerts = alerts[:int(query.get("limit"))] @@ -219,7 +220,9 @@ class WebServer: "mode_types": MODE_TYPES, "sigs": SIGS, # Spot/alert sources are filtered for only ones that are enabled in config, no point letting the user toggle things that aren't even available. - "spot_sources": list(map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), - "alert_sources": list(map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), + "spot_sources": list( + map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["spot_providers"]))), + "alert_sources": list( + map(lambda p: p["name"], filter(lambda p: p["enabled"], self.status_data["alert_providers"]))), "continents": CONTINENTS, "max_spot_age": MAX_SPOT_AGE} diff --git a/views/webpage_about.tpl b/views/webpage_about.tpl index 81fadd4..959435d 100644 --- a/views/webpage_about.tpl +++ b/views/webpage_about.tpl @@ -9,4 +9,6 @@

Supported data sources include DX Clusters, the Reverse Beacon Network (RBN), the APRS Internet Service (APRS-IS), POTA, SOTA, WWFF, GMA, WWBOTA, HEMA, and Parks 'n' Peaks.

The software was written by Ian Renton, MØTRT and other contributors. Full details are available in the README.

« Back home

- \ No newline at end of file + + + \ No newline at end of file diff --git a/views/webpage_alerts.tpl b/views/webpage_alerts.tpl index 4808e0a..a00380f 100644 --- a/views/webpage_alerts.tpl +++ b/views/webpage_alerts.tpl @@ -25,7 +25,7 @@
-
+
@@ -41,4 +41,5 @@

- \ No newline at end of file + + \ No newline at end of file diff --git a/views/webpage_apidocs.tpl b/views/webpage_apidocs.tpl index f9e7f14..f65d976 100644 --- a/views/webpage_apidocs.tpl +++ b/views/webpage_apidocs.tpl @@ -1,4 +1,5 @@ % rebase('webpage_base.tpl') - \ No newline at end of file + + \ No newline at end of file diff --git a/views/webpage_base.tpl b/views/webpage_base.tpl index 13805fd..b8200cf 100644 --- a/views/webpage_base.tpl +++ b/views/webpage_base.tpl @@ -57,10 +57,10 @@ diff --git a/views/webpage_spots.tpl b/views/webpage_spots.tpl index e442da9..956c011 100644 --- a/views/webpage_spots.tpl +++ b/views/webpage_spots.tpl @@ -68,4 +68,5 @@

- \ No newline at end of file + + \ No newline at end of file diff --git a/webassets/apidocs/openapi.yml b/webassets/apidocs/openapi.yml index 30d8b28..0ef90e0 100644 --- a/webassets/apidocs/openapi.yml +++ b/webassets/apidocs/openapi.yml @@ -199,6 +199,12 @@ paths: required: false schema: type: integer + - name: max_duration + in: query + description: Limit the spots to only ones with a duration of this many seconds or less. Duration is end time minus start time, if end time is set, otherwise "now" minus start time. This is useful to filter out people who alert POTA activations lasting months or even years. + required: false + schema: + type: integer - name: source in: query description: "Limit the spots to only ones from one or more sources. To select more than one source, supply a comma-separated list." @@ -424,9 +430,9 @@ components: type: object properties: id: - type: integer + type: string description: Unique identifier based on a hash of the spot to distinguish this one from any others. - example: 123987609816349182 + example: 442c5d56ac467341f1943e8596685073b38f5a5d4c3802ca1e16ecf98967956c dx_call: type: string description: Callsign of the operator that has been spotted @@ -675,9 +681,9 @@ components: type: object properties: id: - type: integer + type: string description: Unique identifier based on a hash of the alert to distinguish this one from any others. - example: 123987609816349182 + example: 442c5d56ac467341f1943e8596685073b38f5a5d4c3802ca1e16ecf98967956c dx_call: type: string description: Callsign of the operator that is going to be activating diff --git a/webassets/css/style.css b/webassets/css/style.css index 5e9e285..77e95af 100644 --- a/webassets/css/style.css +++ b/webassets/css/style.css @@ -1,3 +1,7 @@ +.navbar-nav .nav-link.active { + font-weight: bold; +} + #info-container{ width: 100%; } diff --git a/webassets/js/alerts.js b/webassets/js/alerts.js index 40d40d3..c01e365 100644 --- a/webassets/js/alerts.js +++ b/webassets/js/alerts.js @@ -1,3 +1,6 @@ +// How often to query the server? +const REFRESH_INTERVAL_SEC = 60 * 30; + // Storage for the alert data that the server gives us. var alerts = [] // Storage for the options that the server gives us. This will define our filters. @@ -8,8 +11,9 @@ var lastUpdateTime; // Load alerts and populate the table. function loadAlerts() { $.getJSON('/api/alerts' + buildQueryString(), function(jsonData) { - // Present loaded time - $("#timing-container").text("Data loaded at " + moment.utc().format('HH:mm') + " UTC."); + // Store last updated time + lastUpdateTime = moment.utc(); + updateRefreshDisplay(); // Store data alerts = jsonData; // Update table @@ -26,6 +30,7 @@ function buildQueryString() { } }); str = str + "limit=" + $("#alerts-to-fetch option:selected").val(); + str = str + "&max_duration=604800"; return str; } @@ -64,71 +69,94 @@ function updateTable() { table.find('thead tr').append(`Source`); table.find('thead tr').append(`Ref.`); - if (alerts.length == 0) { + // Split alerts into three types, each of which will get its own table header: On now, next 24h, and later. 
"On now" + // is considered to be events with an end_time where start (a["end_time"] != null && moment.unix(a["end_time"]).utc().isSameOrAfter() && moment.unix(a["start_time"]).utc().isBefore()) + || (a["end_time"] == null && moment.unix(a["start_time"]).utc().add(1, 'hours').isSameOrAfter() && moment.unix(a["start_time"]).utc().isBefore())); + next24h = alerts.filter(a => moment.unix(a["start_time"]).utc().isSameOrAfter() && moment.unix(a["start_time"]).utc().subtract(24, 'hours').isBefore()); + later = alerts.filter(a => moment.unix(a["start_time"]).utc().subtract(24, 'hours').isSameOrAfter()); + + if (onNow.length > 0) { + table.find('tbody').append('On Now'); + addAlertRowsToTable(table.find('tbody'), onNow); + } + + if (next24h.length > 0) { + table.find('tbody').append('Starting within 24 hours'); + addAlertRowsToTable(table.find('tbody'), next24h); + } + + if (later.length > 0) { + table.find('tbody').append('Starting later '); + addAlertRowsToTable(table.find('tbody'), later); + } + + if (onNow.length == 0 && next24h.length == 0 && later.length == 0) { table.find('tbody').append('No alerts match your filters.'); } - alerts.forEach(s => { + // Update DOM + $('#table-container').html(table); +} + +// Add a row to tbody for each alert in the provided list +function addAlertRowsToTable(tbody, alerts) { + alerts.forEach(a => { // Create row let $tr = $(''); // Format UTC times for display - var start_time = moment.unix(s["start_time"]).utc(); + var start_time = moment.unix(a["start_time"]).utc(); var start_time_formatted = start_time.format("YYYY-MM-DD HH:mm"); - var end_time = moment.unix(s["start_time"]).utc(); + var end_time = moment.unix(a["end_time"]).utc(); var end_time_formatted = (end_time != null) ? end_time.format("YYYY-MM-DD HH:mm") : "Not specified"; // Format dx country - var dx_country = s["dx_country"] + var dx_country = a["dx_country"] if (dx_country == null) { dx_country = "Unknown or not a country" } // Format freqs & modes var freqsModesText = ""; - if (s["freqs_modes"] != null) { - freqsModesText = escapeHtml(s["freqs_modes"]); + if (a["freqs_modes"] != null) { + freqsModesText = escapeHtml(a["freqs_modes"]); } // Format comment var commentText = ""; - if (s["comment"] != null) { - commentText = escapeHtml(s["comment"]); + if (a["comment"] != null) { + commentText = escapeHtml(a["comment"]); } // Sig or fallback to source - var sigSourceText = s["source"]; - if (s["sig"]) { - sigSourceText = s["sig"]; + var sigSourceText = a["source"]; + if (a["sig"]) { + sigSourceText = a["sig"]; } // Format sig_refs var sig_refs = "" - if (s["sig_refs"]) { - sig_refs = s["sig_refs"].join(", ") + if (a["sig_refs"]) { + sig_refs = a["sig_refs"].join(", ") } // Populate the row $tr.append(`${start_time_formatted}`); $tr.append(`${end_time_formatted}`); - $tr.append(`${s["dx_flag"]}${s["dx_call"]}`); + $tr.append(`${a["dx_flag"]}${a["dx_call"]}`); $tr.append(`${freqsModesText}`); $tr.append(`${commentText}`); - $tr.append(` ${sigSourceText}`); + $tr.append(` ${sigSourceText}`); $tr.append(`${sig_refs}`); - table.find('tbody').append($tr); + tbody.append($tr); // Second row for mobile view only, containing source, ref, freqs/modes & comment $tr2 = $(""); - if (s["qrt"] == true) { - $tr2.addClass("table-faded"); - } - $tr2.append(` ${sig_refs} ${freqsModesText}
${commentText}`); - table.find('tbody').append($tr2); + $tr2.append(` ${sig_refs} ${freqsModesText}
${commentText}`); + tbody.append($tr2); }); - - // Update DOM - $('#table-container').html(table); } // Load server options. Once a successful callback is made from this, we then query alerts. @@ -144,8 +172,9 @@ function loadOptions() { // Load settings from settings storage loadSettings(); - // Load alerts + // Load alerts and set up the timer loadAlerts(); + setInterval(loadAlerts, REFRESH_INTERVAL_SEC * 1000); }); } @@ -183,6 +212,24 @@ function filtersUpdated() { saveSettings(); } +// Update the refresh timing display +function updateRefreshDisplay() { + if (lastUpdateTime != null) { + let count = REFRESH_INTERVAL_SEC; + let secSinceUpdate = moment.duration(moment().diff(lastUpdateTime)).asSeconds(); + updatingString = "Updating..." + if (secSinceUpdate < REFRESH_INTERVAL_SEC) { + count = REFRESH_INTERVAL_SEC - secSinceUpdate; + if (count <= 60) { + updatingString = "Updating in " + count.toFixed(0) + " seconds..."; + } else { + updatingString = "Updating in " + Math.floor(count / 60.0).toFixed(0) + " minutes."; + } + } + $("#timing-container").text("Last updated at " + lastUpdateTime.format('HH:mm') + " UTC. " + updatingString); + } +} + // Utility function to escape HTML characters from a string. function escapeHtml(str) { if (typeof str !== 'string') { @@ -244,6 +291,8 @@ function setUpEventListeners() { $(document).ready(function() { // Call loadOptions(), this will then trigger loading alerts and setting up timers. loadOptions(); + // Update the refresh timing display every second + setInterval(updateRefreshDisplay, 1000); // Set up event listeners setUpEventListeners(); }); \ No newline at end of file diff --git a/webassets/js/spots.js b/webassets/js/spots.js index ce70432..4674d63 100644 --- a/webassets/js/spots.js +++ b/webassets/js/spots.js @@ -314,7 +314,11 @@ function updateRefreshDisplay() { updatingString = "Updating..." if (secSinceUpdate < REFRESH_INTERVAL_SEC) { count = REFRESH_INTERVAL_SEC - secSinceUpdate; - updatingString = "Updating in " + count.toFixed(0) + " seconds..."; + if (count <= 60) { + updatingString = "Updating in " + count.toFixed(0) + " seconds..."; + } else { + updatingString = "Updating in " + Math.floor(count / 60.0).toFixed(0) + " minutes."; + } } $("#timing-container").text("Last updated at " + lastUpdateTime.format('HH:mm') + " UTC. " + updatingString); }
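The switch in data/alert.py and data/spot.py from Python's built-in hash() to a SHA-256 digest matters because hash() on strings is salted with a per-process seed, so the resulting IDs would change on every restart even though the diskcache store survives restarts. The sketch below illustrates the same idea with a hypothetical cut-down dataclass (MiniAlert and its fields are invented for the example; the real Alert and Spot classes carry many more fields and also blank received_time_iso):

import copy
import hashlib
from dataclasses import dataclass


@dataclass
class MiniAlert:
    # Hypothetical subset of the real Alert fields, for illustration only.
    dx_call: str = None
    start_time: float = None
    received_time: float = None

    def stable_id(self) -> str:
        # Blank out the volatile field so two copies of the same alert, fetched at
        # different times, produce the same key.
        clone = copy.deepcopy(self)
        clone.received_time = 0
        # SHA-256 of the repr is stable across runs, unlike hash(), which is salted per process.
        return hashlib.sha256(str(clone).encode("utf-8")).hexdigest()


a = MiniAlert(dx_call="M0TRT", start_time=1700000000, received_time=1700000500)
b = MiniAlert(dx_call="M0TRT", start_time=1700000000, received_time=1700009999)
assert a.stable_id() == b.stable_id()  # duplicates collapse to a single cache key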
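alertproviders/alert_provider.py (on ingest) and core/cleanup.py (on the periodic cleanup) now apply the same date rule from opposite directions: an alert is worth keeping if its end_time is still in the future, or, when it has no end_time, if it started within the last 24 hours. A minimal sketch of that rule as a single predicate follows; the field names match the patch, but factoring the test into a shared helper like this is only a suggestion, not something the patch itself does:

from datetime import datetime, timedelta

import pytz


def alert_is_current(start_time: float, end_time: float | None) -> bool:
    """True if an alert passes the sensible-date test used on ingest and cleanup."""
    now = datetime.now(pytz.UTC)
    if end_time:
        # Definite end time: keep only while it has not yet passed.
        return end_time > now.timestamp()
    # No end time: keep only if it started within the last 24 hours.
    return start_time > (now - timedelta(days=1)).timestamp()


# The provider would then add an alert only when alert_is_current(...) is True, and the
# cleanup timer would evict any cached id for which it has since become False.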
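The new max_duration query parameter in server/webserver.py and webassets/apidocs/openapi.yml defines an alert's duration as end_time minus start_time when an end time is present, and otherwise as "now" minus start_time; webassets/js/alerts.js always sends max_duration=604800 (seven days) so that open-ended activations lasting months do not clutter the table. Here is a standalone sketch of the same filter, assuming alerts are plain dicts rather than the project's Alert dataclass:

from datetime import datetime

import pytz


def filter_max_duration(alerts: list[dict], max_duration: int) -> list[dict]:
    """Keep only alerts whose actual (or so-far) duration is max_duration seconds or less."""
    now = datetime.now(pytz.UTC).timestamp()
    kept = []
    for a in alerts:
        end = a.get("end_time")
        duration = (end - a["start_time"]) if end else (now - a["start_time"])
        if duration <= max_duration:
            kept.append(a)
    return kept


# Roughly what the alerts page requests: drop anything longer than a week.
ONE_WEEK = 7 * 24 * 60 * 60  # 604800 seconds
short_alerts = filter_max_duration(
    [{"start_time": 1.7e9, "end_time": 1.7e9 + 3600}, {"start_time": 1.6e9, "end_time": None}],
    ONE_WEEK,
)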