spothole/spotproviders/parksnpeaks.py

import csv
import logging
import re
from datetime import datetime, timedelta
import pytz
from requests_cache import CachedSession
from core.constants import HTTP_HEADERS
from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Parks n Peaks
class ParksNPeaks(HTTPSpotProvider):
    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://www.parksnpeaks.org/api/ALL"
    SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv"
    SIOTA_LIST_CACHE_TIME_DAYS = 30
    SIOTA_LIST_CACHE = CachedSession("cache/siota_data_cache", expire_after=timedelta(days=SIOTA_LIST_CACHE_TIME_DAYS))
    ZLOTA_LIST_URL = "https://ontheair.nz/assets/assets.json"
    ZLOTA_LIST_CACHE_TIME_DAYS = 30
    ZLOTA_LIST_CACHE = CachedSession("cache/zlota_data_cache", expire_after=timedelta(days=ZLOTA_LIST_CACHE_TIME_DAYS))

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
        new_spots = []
        # Iterate through source data
        for source_spot in http_response.json():
            # Convert to our spot format
            spot = Spot(source=self.name,
                        source_id=source_spot["actID"],
                        dx_call=source_spot["actCallsign"].upper(),
                        de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None,  # typo exists in API
                        freq=float(source_spot["actFreq"].replace(",", "")) * 1000000 if (
                            source_spot["actFreq"] != "") else None,
                        # Seen PNP spots with empty frequency, and with comma-separated thousands digits
                        mode=source_spot["actMode"].upper(),
                        comment=source_spot["actComments"],
                        sig=source_spot["actClass"],
                        sig_refs=[source_spot["actSiteID"]],
                        time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(
                            tzinfo=pytz.UTC).timestamp())

            # Free-text location is not present in all spots, so only add it if it's set
            if "actLocation" in source_spot and source_spot["actLocation"] != "":
                spot.sig_refs_names = [source_spot["actLocation"]]

            # Extract a de_call if it's in the comment but not in the "actSpoter" field
            m = re.search(r"\(de ([A-Za-z0-9]*)\)", spot.comment)
            if (not spot.de_call or spot.de_call == "ZLOTA") and m is not None:
                spot.de_call = m.group(1)

            # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
            if spot.sig not in ["POTA", "SOTA", "WWFF", "SiOTA", "ZLOTA", "KRMNPA"]:
                logging.warning("PNP spot found with sig %s, developer needs to add support for this!", spot.sig)

            # SiOTA lat/lon/grid lookup
            if spot.sig == "SiOTA":
                siota_csv_data = self.SIOTA_LIST_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
                siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
                for row in siota_dr:
                    if row["SILO_CODE"] == spot.sig_refs[0]:
                        spot.dx_latitude = float(row["LAT"])
                        spot.dx_longitude = float(row["LNG"])
                        spot.dx_grid = row["LOCATOR"]
                        break

            # ZLOTA name/lat/lon lookup
            if spot.sig == "ZLOTA":
                zlota_data = self.ZLOTA_LIST_CACHE.get(self.ZLOTA_LIST_URL, headers=HTTP_HEADERS).json()
                for asset in zlota_data:
                    if asset["code"] == spot.sig_refs[0]:
                        spot.sig_refs_names = [asset["name"]]
                        spot.dx_latitude = asset["y"]
                        spot.dx_longitude = asset["x"]
                        break

            # Note there is currently no support for KRMNPA location lookup, see issue #61.

            # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore.
            # Otherwise, add to the spot list.
            if spot.sig not in ["POTA", "SOTA", "WWFF"]:
                new_spots.append(spot)

        return new_spots
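
For reference, here is a minimal standalone sketch (not part of the provider class) of the two PnP field-parsing rules handled above: frequencies arrive as MHz strings that may be empty or contain comma thousands separators, and the spotter callsign sometimes appears only in the comment as "(de CALL)". The field names match those used in http_response_to_spots; the example values themselves are hypothetical.

import re

# Hypothetical example record; only the field names come from the API usage above
example = {"actFreq": "1,296.200", "actSpoter": "", "actComments": "Testing 23cm (de VK3ABC)"}

# MHz string -> Hz, tolerating empty values and comma thousands separators
freq = float(example["actFreq"].replace(",", "")) * 1000000 if example["actFreq"] != "" else None
print(freq)  # 1296200000.0

# Fall back to a "(de CALL)" marker in the comment when actSpoter is blank
de_call = example["actSpoter"].upper() if example["actSpoter"] != "" else None
m = re.search(r"\(de ([A-Za-z0-9]*)\)", example["actComments"])
if not de_call and m is not None:
    de_call = m.group(1)
print(de_call)  # VK3ABC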