Only fill missing info if spot passes the timestamp check in submit(). Closes #14

Ian Renton
2025-09-29 19:18:20 +01:00
parent 89a8db579e
commit cd575e7ed2
11 changed files with 17 additions and 23 deletions


@@ -51,8 +51,7 @@ class APRSIS(Provider):
         latitude=data["latitude"] if "latitude" in data else None,
         longitude=data["longitude"] if "longitude" in data else None,
         time=datetime.now(pytz.UTC))  # APRS-IS spots are live so we can assume spot time is "now"
-    # Fill in any blanks
-    spot.infer_missing()
     # Add to our list
     self.submit(spot)
     print(spot)


@@ -73,8 +73,7 @@ class DXCluster(Provider):
         freq=float(match.group(2)),
         comment=match.group(4).strip(),
         time=spot_datetime)
-    # Fill in any blanks
-    spot.infer_missing()
     # Add to our list
     self.submit(spot)


@@ -46,8 +46,6 @@ class GMA(HTTPProvider):
     ref_info = ref_response.json()
     spot.sig = ref_info["reftype"]
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us.
     new_spots.append(spot)


@@ -61,8 +61,6 @@ class HEMA(HTTPProvider):
         latitude=float(spot_items[7]),
         longitude=float(spot_items[8]))
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us.
     new_spots.append(spot)


@@ -37,8 +37,6 @@ class ParksNPeaks(HTTPProvider):
     # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore.
     if spot.sig not in ["POTA", "SOTA", "WWFF"]:
         logging.warn("PNP spot found with sig " + spot.sig + ", developer needs to figure out how to look this up for grid/lat/lon!")
-        # Fill in any missing data
-        spot.infer_missing()
         # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
         # that for us.
         new_spots.append(spot)


@@ -36,8 +36,7 @@ class POTA(HTTPProvider):
         grid=source_spot["grid6"],
         latitude=source_spot["latitude"],
         longitude=source_spot["longitude"])
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us.
     new_spots.append(spot)


@@ -32,17 +32,25 @@ class Provider:
     raise NotImplementedError("Subclasses must implement this method")

     # Submit a batch of spots retrieved from the provider. Only spots that are newer than the last spot retrieved
-    # by this provider will be added to the spot list, to prevent duplications. This is called by the API-querying
+    # by this provider will be added to the spot list, to prevent duplications. Spots passing the check will also have
+    # their infer_missing() method called to complete their data set. This is called by the API-querying
     # subclasses on receiving spots.
     def submit_batch(self, spots):
         for spot in spots:
             if spot.time > self.last_spot_time:
+                # Fill in any blanks
+                spot.infer_missing()
+                # Append to the list
                 self.spot_list.append(spot)
         self.last_spot_time = max(map(lambda s: s.time, spots))

-    # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. This is
-    # called by the data streaming subclasses, which can be relied upon not to re-provide old spots.
+    # Submit a single spot retrieved from the provider. This will be added to the list regardless of its age. Spots
+    # passing the check will also have their infer_missing() method called to complete their data set. This is called by
+    # the data streaming subclasses, which can be relied upon not to re-provide old spots.
     def submit(self, spot):
+        # Fill in any blanks
+        spot.infer_missing()
+        # Append to the list
         self.spot_list.append(spot)
         self.last_spot_time = spot.time
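
For convenience, the two methods as they read after this commit, reassembled from the hunk above (indentation, blank lines and the rest of the class body are assumed rather than copied from the file):

    def submit_batch(self, spots):
        for spot in spots:
            if spot.time > self.last_spot_time:
                # Fill in any blanks
                spot.infer_missing()
                # Append to the list
                self.spot_list.append(spot)
        self.last_spot_time = max(map(lambda s: s.time, spots))

    def submit(self, spot):
        # Fill in any blanks
        spot.infer_missing()
        # Append to the list
        self.spot_list.append(spot)
        self.last_spot_time = spot.time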


@@ -74,8 +74,7 @@ class RBN(Provider):
         freq=float(match.group(2)),
         comment=match.group(4).strip(),
         time=spot_datetime)
-    # Fill in any blanks
-    spot.infer_missing()
     # Add to our list
     self.submit(spot)


@@ -60,8 +60,6 @@ class SOTA(HTTPProvider):
     spot.latitude = summit_data["latitude"]
     spot.longitude = summit_data["longitude"]
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us.
     new_spots.append(spot)


@@ -42,8 +42,7 @@ class WWBOTA(HTTPProvider):
         latitude=source_spot["references"][0]["lat"],
         longitude=source_spot["references"][0]["long"],
         qrt=source_spot["type"] == "QRT")
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us. But WWBOTA does support a special "Test" spot type, we need to avoid adding that.
     if source_spot["type"] != "Test":


@@ -35,8 +35,7 @@ class WWFF(HTTPProvider):
         time=datetime.fromtimestamp(source_spot["spot_time"]).replace(tzinfo=pytz.UTC),
         latitude=source_spot["latitude"],
         longitude=source_spot["longitude"])
-    # Fill in any missing data
-    spot.infer_missing()
     # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
     # that for us.
     new_spots.append(spot)
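
As a closing illustration of the behaviour the commit title describes, here is a small self-contained sketch. MiniSpot and MiniProvider are stand-ins invented for this example rather than classes from the repository; only the submit()/submit_batch() bodies mirror the provider.py hunk above.

    from datetime import datetime, timezone

    class MiniSpot:
        # Stand-in for the project's Spot class.
        def __init__(self, time, grid=None):
            self.time = time
            self.grid = grid

        def infer_missing(self):
            # Stand-in for the real infer_missing(): fill in any blanks.
            if self.grid is None:
                self.grid = "(looked up)"

    class MiniProvider:
        # Mirrors the submit()/submit_batch() logic shown in the provider.py hunk above.
        def __init__(self):
            self.spot_list = []
            self.last_spot_time = datetime.min.replace(tzinfo=timezone.utc)

        def submit_batch(self, spots):
            for spot in spots:
                if spot.time > self.last_spot_time:
                    # infer_missing() now only runs for spots that pass the timestamp check
                    spot.infer_missing()
                    self.spot_list.append(spot)
            self.last_spot_time = max(s.time for s in spots)

        def submit(self, spot):
            # Streaming path: the spot is always accepted, so it is always filled in
            spot.infer_missing()
            self.spot_list.append(spot)
            self.last_spot_time = spot.time

    provider = MiniProvider()
    fresh = MiniSpot(time=datetime(2025, 9, 29, 19, 0, tzinfo=timezone.utc))
    stale = MiniSpot(time=datetime(2025, 9, 28, 19, 0, tzinfo=timezone.utc))
    provider.submit_batch([fresh])          # newer than anything seen: infer_missing() runs
    provider.submit_batch([stale, fresh])   # both fail the timestamp check: no fill-in, nothing added
    print(len(provider.spot_list), stale.grid)   # 1 None

Before this commit, each provider called infer_missing() on every spot it built, including spots that submit_batch() would then discard.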