import csv
import logging
import re
from datetime import datetime, timedelta

import pytz
from requests_cache import CachedSession

from core.constants import HTTP_HEADERS
from core.sig_utils import get_icon_for_sig
from data.spot import Spot
from spotproviders.http_spot_provider import HTTPSpotProvider


# Spot provider for Parks n Peaks
class ParksNPeaks(HTTPSpotProvider):
    POLL_INTERVAL_SEC = 120
    SPOTS_URL = "https://www.parksnpeaks.org/api/ALL"
    SIOTA_LIST_URL = "https://www.silosontheair.com/data/silos.csv"
    SIOTA_LIST_CACHE_TIME_DAYS = 30
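    # The SiOTA silo list rarely changes, so requests-cache keeps a copy on disk and only
    # re-downloads it once the 30-day expiry has passed.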
    SIOTA_LIST_CACHE = CachedSession("cache/siota_data_cache", expire_after=timedelta(days=SIOTA_LIST_CACHE_TIME_DAYS))

    def __init__(self, provider_config):
        super().__init__(provider_config, self.SPOTS_URL, self.POLL_INTERVAL_SEC)

    def http_response_to_spots(self, http_response):
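        """Convert a Parks n Peaks API response into a list of Spot objects.

        POTA, SOTA, WWFF and ZLOTA spots are skipped because that data is already obtained
        through other means; everything else (e.g. SiOTA, KRMNPA) is returned.
        """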
        new_spots = []
        # Iterate through source data
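        # Each element of the response is one activation record. An illustrative (not verbatim)
        # record, showing only the fields read below, looks roughly like:
        #   {"actID": "123456", "actCallsign": "VK3ABC", "actSpoter": "VK2DEF", "actFreq": "7.090",
        #    "actMode": "SSB", "actComments": "CQ CQ", "actClass": "SiOTA", "actSiteID": "VK-ABC1",
        #    "actTime": "2024-01-01 01:23:45", "actLocation": "Somewhere, VIC"}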
        for source_spot in http_response.json():
            # Convert to our spot format
            spot = Spot(source=self.name,
                        source_id=source_spot["actID"],
                        dx_call=source_spot["actCallsign"].upper(),
                        de_call=source_spot["actSpoter"].upper() if source_spot["actSpoter"] != "" else None,  # typo exists in API
                        freq=float(source_spot["actFreq"].replace(",", "")) * 1000000 if (
                            source_spot["actFreq"] != "") else None,
                        # Seen PNP spots with empty frequency, and with comma-separated thousands digits
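                        # actFreq is quoted in MHz; multiplying by 1,000,000 gives the value in Hz,
                        # which appears to be the unit the Spot model expects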
                        mode=source_spot["actMode"].upper(),
                        comment=source_spot["actComments"],
                        sig=source_spot["actClass"],
                        sig_refs=[source_spot["actSiteID"]],
                        icon=get_icon_for_sig(source_spot["actClass"]),
                        time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(
                            tzinfo=pytz.UTC).timestamp())

            # Free text location is not present in all spots, so only add it if it's set
            if "actLocation" in source_spot and source_spot["actLocation"] != "":
                spot.sig_refs_names = [source_spot["actLocation"]]

            # Extract a de_call if it's in the comment but not in the "actSpoter" field
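            # e.g. a comment ending in "(de VK3XYZ)" would give a de_call of VK3XYZ (illustrative callsign)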
            m = re.search(r"\(de ([A-Za-z0-9]*)\)", spot.comment)
            if not spot.de_call and m:
                spot.de_call = m.group(1)

            # Log a warning for the developer if PnP gives us an unknown programme we've never seen before
            if spot.sig not in ["POTA", "SOTA", "WWFF", "SiOTA", "ZLOTA", "KRMNPA"]:
                logging.warning("PNP spot found with sig " + spot.sig + ", developer needs to add support for this!")

            # SiOTA lat/lon/grid lookup
            if spot.sig == "SiOTA":
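                # Look the silo code up in the SiOTA CSV, which is assumed to provide SILO_CODE,
                # LAT, LNG and LOCATOR columns as read below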
                siota_csv_data = self.SIOTA_LIST_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
                siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
                for row in siota_dr:
                    if row["SILO_CODE"] == spot.sig_refs[0]:
                        spot.dx_latitude = float(row["LAT"])
                        spot.dx_longitude = float(row["LNG"])
                        spot.dx_grid = row["LOCATOR"]
                        break

            # Note there is currently no support for KRMNPA location lookup, see issue #61.

            # If this is POTA, SOTA, WWFF or ZLOTA data we already have it through other means, so ignore. Otherwise,
            # add to the spot list.
            if spot.sig not in ["POTA", "SOTA", "WWFF", "ZLOTA"]:
                new_spots.append(spot)
        return new_spots