import copy
import hashlib
import json
from dataclasses import dataclass
from datetime import datetime, timedelta

import pytz

from core.lookup_helper import lookup_helper
from core.sig_utils import populate_sig_ref_info


@dataclass
class Alert:
    """Data class that defines an alert (an announced future activation, as opposed to a live spot)."""
    # Unique identifier for the alert. Generated in infer_missing() as a SHA256 hash of the other fields.
    id: str = None
    # Callsigns of the operators that have been alerted
    dx_calls: list = None
    # Names of the operators that have been alerted
    dx_names: list = None
    # Country of the DX operator
    dx_country: str = None
    # Country flag of the DX operator
    dx_flag: str = None
    # Continent of the DX operator
    dx_continent: str = None
    # DXCC ID of the DX operator
    dx_dxcc_id: int = None
    # CQ zone of the DX operator
    dx_cq_zone: int = None
    # ITU zone of the DX operator
    dx_itu_zone: int = None
    # Intended frequencies & modes of operation. Essentially just a different kind of comment field.
    freqs_modes: str = None
    # Start time of the activation, UTC seconds since UNIX epoch
    start_time: float = None
    # Start time of the activation of the alert, ISO 8601
    start_time_iso: str = None
    # End time of the activation, UTC seconds since UNIX epoch. Optional
    end_time: float = None
    # End time of the activation of the alert, ISO 8601
    end_time_iso: str = None
    # Time that this software received the alert, UTC seconds since UNIX epoch. This is used with the
    # "since_received" call to our API to receive all data that is new to us, even if by a quirk of the API it
    # might be older than the last time the client polled the API.
    received_time: float = None
    # Time that this software received the alert, ISO 8601
    received_time_iso: str = None
    # Comment made by the alerter, if any
    comment: str = None
    # Special Interest Group (SIG), e.g. outdoor activity programme such as POTA
    sig: str = None
    # SIG references. We allow multiple here for e.g. n-fer activations, unlike ADIF SIG_INFO
    sig_refs: list = None
    # Whether this alert is for a DXpedition, as opposed to e.g. an xOTA programme.
    is_dxpedition: bool = False
    # Where we got the alert from, e.g. "POTA", "SOTA"...
    source: str = None
    # The ID the source gave it, if any.
    source_id: str = None

    def infer_missing(self):
        """Infer missing parameters where possible, then (re)generate the deterministic alert ID.

        Safe to call repeatedly: every fill-in is guarded by a "not already set" check, and the ID hash
        deliberately excludes the volatile fields so repeated calls yield the same ID.
        """
        # If we somehow don't have a start time, set it to zero so it sorts off the bottom of any list but
        # clients can still reliably parse it as a number.
        if not self.start_time:
            self.start_time = 0
        # If we don't have a received time, this has just been received so set that to "now"
        if not self.received_time:
            self.received_time = datetime.now(pytz.UTC).timestamp()

        # Fill in ISO versions of times, in case the client prefers that
        if self.start_time and not self.start_time_iso:
            self.start_time_iso = datetime.fromtimestamp(self.start_time, pytz.UTC).isoformat()
        if self.end_time and not self.end_time_iso:
            self.end_time_iso = datetime.fromtimestamp(self.end_time, pytz.UTC).isoformat()
        if self.received_time and not self.received_time_iso:
            self.received_time_iso = datetime.fromtimestamp(self.received_time, pytz.UTC).isoformat()

        # DX country, continent, zones etc. from callsign. CQ/ITU zone are better looked up with a location but
        # we don't have a real location for alerts. Only the first callsign is used for these lookups.
        first_call = self.dx_calls[0] if self.dx_calls else None
        if first_call:
            if not self.dx_country:
                self.dx_country = lookup_helper.infer_country_from_callsign(first_call)
            if not self.dx_continent:
                self.dx_continent = lookup_helper.infer_continent_from_callsign(first_call)
            if not self.dx_cq_zone:
                self.dx_cq_zone = lookup_helper.infer_cq_zone_from_callsign(first_call)
            if not self.dx_itu_zone:
                self.dx_itu_zone = lookup_helper.infer_itu_zone_from_callsign(first_call)
            if not self.dx_dxcc_id:
                self.dx_dxcc_id = lookup_helper.infer_dxcc_id_from_callsign(first_call)
        if self.dx_dxcc_id and not self.dx_flag:
            self.dx_flag = lookup_helper.get_flag_for_dxcc(self.dx_dxcc_id)

        # Fetch SIG data. In case a particular API doesn't provide a full set of name, lat, lon & grid for a
        # reference in its initial call, we use this code to populate the rest of the data. This includes working
        # out grid refs from WAB and WAI, which count as a SIG even though there's no real lookup, just maths
        if self.sig_refs:
            for sig_ref in self.sig_refs:
                populate_sig_ref_info(sig_ref)

        # If the spot itself doesn't have a SIG yet, but we have at least one SIG reference, take that
        # reference's SIG and apply it to the whole spot.
        if self.sig_refs and self.sig_refs[0] and not self.sig:
            self.sig = self.sig_refs[0].sig

        # DX operator details lookup, using QRZ.com. This should be the last resort compared to taking the data
        # from the actual alerting service, e.g. we don't want to accidentally use a user's QRZ.com home lat/lon
        # instead of the one from the park reference they're at.
        if self.dx_calls and not self.dx_names:
            self.dx_names = [lookup_helper.infer_name_from_callsign_online_lookup(c) for c in self.dx_calls]

        # Always create an ID based on a hash of every parameter *except* received_time and the id itself. This
        # is used as the index to a map, which as a byproduct avoids us having multiple duplicate copies of the
        # object that are identical apart from that they were retrieved from the API at different times.
        # Clearing the copy's id also makes this idempotent: without it, a second call to infer_missing() would
        # hash the previously-generated id into the repr and produce a different result. Note that the simple
        # Python hash() function includes a seed randomly generated at runtime; this is therefore not consistent
        # between runs. But we use diskcache to store our data between runs, so we use SHA256 which does not
        # include this random element.
        self_copy = copy.deepcopy(self)
        self_copy.id = None
        self_copy.received_time = 0
        self_copy.received_time_iso = ""
        self.id = hashlib.sha256(str(self_copy).encode("utf-8")).hexdigest()

    def to_json(self):
        """JSON serialise this alert (recursively, via each object's __dict__) with sorted keys."""
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)

    def expired(self):
        """Decide if this alert has expired (in which case it should not be added to the system in the first
        place, and not returned by the web server if later requested, and removed by the cleanup functions).
        "Expired" is defined as either having an end_time in the past, or if it only has a start_time, then that
        start time was more than 3 hours ago. If it somehow doesn't have a start_time either, it is considered
        to be expired."""
        # No start time at all: treat as expired.
        if not self.start_time:
            return True
        now = datetime.now(pytz.UTC)
        # An explicit end time governs when one is present.
        if self.end_time:
            return self.end_time < now.timestamp()
        # Otherwise, allow a 3-hour grace window after the start time.
        return self.start_time < (now - timedelta(hours=3)).timestamp()