spothole/providers/parksnpeaks.py

import logging
from datetime import datetime

import pytz

from data.spot import Spot
from providers.http_provider import HTTPProvider


# Provider for Parks n Peaks
class ParksNPeaks(HTTPProvider):
    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://www.parksnpeaks.org/api/ALL"

    def __init__(self):
        super().__init__(self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def name(self):
        return "ParksNPeaks"

    def http_response_to_spots(self, http_response):
        new_spots = []
        # Iterate through source data
        for source_spot in http_response.json():
            # Convert to our spot format
            spot = Spot(source=self.name(),
                        source_id=source_spot["actID"],
                        dx_call=source_spot["actCallsign"].upper(),
                        de_call=source_spot["actSpoter"].upper(),  # typo exists in API
                        freq=float(source_spot["actFreq"]) * 1000 if (source_spot["actFreq"] != "") else None,  # Seen PNP spots with empty frequency!
                        mode=source_spot["actMode"].upper(),
                        comment=source_spot["actComments"],
                        sig=source_spot["actClass"],
                        sig_refs=[source_spot["actSiteID"]],
                        time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC))

            # POTA, SOTA and WWFF spots already reach us through their dedicated providers; for
            # anything else we don't yet know how to look up the reference for grid/lat/lon.
            if spot.sig not in ["POTA", "SOTA", "WWFF"]:
                logging.warning("PNP spot found with sig %s, developer needs to figure out how to look this up for grid/lat/lon!", spot.sig)

            # Fill in any missing data
            spot.infer_missing()

            # Add to our list. Don't worry about de-duping, removing old spots etc. at this point;
            # other code will do that for us.
            new_spots.append(spot)

        return new_spots
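

# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the provider itself. It shows how
# http_response_to_spots() turns one PNP API record into a Spot. Assumptions:
# constructing ParksNPeaks() performs no network I/O (HTTPProvider is only
# handed the URL and poll interval above), the response object only needs a
# .json() method (the only call made on it above), and every field value in
# the sample record below is invented for demonstration purposes.
if __name__ == "__main__":
    class _FakeResponse:
        """Stands in for the HTTP response; only .json() is required."""
        def json(self):
            return [{"actID": "12345",
                     "actCallsign": "vk1abc/p",
                     "actSpoter": "vk2def",           # (sic) API field name
                     "actFreq": "7.090",
                     "actMode": "ssb",
                     "actComments": "Demo spot",
                     "actClass": "SiOTA",             # exercises the warning branch
                     "actSiteID": "VK-XYZ1",
                     "actTime": "2025-09-27 13:00:00"}]

    for spot in ParksNPeaks().http_response_to_spots(_FakeResponse()):
        print(spot)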