mirror of
https://git.ianrenton.com/ian/spothole.git
synced 2025-10-27 08:49:27 +00:00
Add data providers for most other programmes
This commit is contained in:
68
providers/sota.py
Normal file
68
providers/sota.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import requests
|
||||
from requests_cache import CachedSession
|
||||
|
||||
from data.spot import Spot
|
||||
from providers.http_provider import HTTPProvider
|
||||
|
||||
|
||||
# Provider for Summits on the Air
|
||||
class SOTA(HTTPProvider):
    """Data provider for Summits on the Air (SOTA).

    Polls the cheap SOTA "epoch" endpoint on a timer (via the HTTPProvider
    superclass); only when the epoch value changes does it fetch the full
    spot list and the per-summit location data.
    """

    POLL_INTERVAL_SEC = 120
    # SOTA wants us to check for an "epoch" from the API and see if it's actually changed before querying the main data
    # APIs. So it's actually the EPOCH_URL that we pass into the constructor and get the superclass to call on a timer.
    # The actual data lookup all happens after parsing and checking the epoch.
    EPOCH_URL = "https://api-db2.sota.org.uk/api/spots/epoch"
    SPOTS_URL = "https://api-db2.sota.org.uk/api/spots/60/all/all"
    # SOTA spots don't contain lat/lon, we need a separate lookup for that
    SUMMIT_URL_ROOT = "https://api-db2.sota.org.uk/api/summits/"
    SUMMIT_DATA_CACHE_TIME_DAYS = 30
    SUMMIT_DATA_CACHE = CachedSession("sota_summit_data_cache", expire_after=timedelta(days=SUMMIT_DATA_CACHE_TIME_DAYS))
    # requests has no default timeout, so without an explicit one a stalled API call would hang the poll forever.
    REQUEST_TIMEOUT_SEC = 30

    def __init__(self):
        # Hand the epoch URL (not the spots URL) to the superclass poller; see class comment above.
        super().__init__(self.EPOCH_URL, self.POLL_INTERVAL_SEC)
        # Last epoch value successfully processed. "" guarantees the first poll counts as a change.
        self.api_epoch = ""

    def name(self):
        # Provider display name, also used as the Spot source field.
        return "SOTA"

    def http_response_to_spots(self, http_response):
        """Turn a polled epoch response into a list of Spot objects.

        http_response is the response from EPOCH_URL. Returns an empty list
        when the epoch has not changed since the last successful fetch;
        otherwise fetches and converts the full spot list.
        """
        # OK, source data is actually just the epoch at this point. We'll then go on to fetch real data if we know this
        # has changed.
        new_epoch = http_response.text
        epoch_changed = new_epoch != self.api_epoch

        new_spots = []
        # OK, if the epoch actually changed, now we make the real request for data.
        if epoch_changed:
            source_data = requests.get(self.SPOTS_URL, headers=self.HTTP_HEADERS,
                                       timeout=self.REQUEST_TIMEOUT_SEC).json()
            # Iterate through source data
            for source_spot in source_data:
                # Convert to our spot format
                spot = Spot(source=self.name(),
                            source_id=source_spot["id"],
                            dx_call=source_spot["activatorCallsign"].upper(),
                            dx_name=source_spot["activatorName"],
                            de_call=source_spot["callsign"].upper(),
                            # SOTA reports frequency in MHz; Spot wants kHz, hence the * 1000.
                            freq=(float(source_spot["frequency"]) * 1000) if (source_spot["frequency"] is not None) else None,  # Seen SOTA spots with no frequency!
                            mode=source_spot["mode"].upper(),
                            comment=source_spot["comments"],
                            sig="SOTA",
                            sig_refs=[source_spot["summitCode"]],
                            sig_refs_names=[source_spot["summitName"]],
                            # NOTE(review): assumes timeStamp is in a form datetime.fromisoformat accepts
                            # (pre-3.11 this rejects a trailing 'Z') -- TODO confirm against the API.
                            time=datetime.fromisoformat(source_spot["timeStamp"]),
                            activation_score=source_spot["points"])

                # SOTA doesn't give summit lat/lon/grid in the main call, so we need another separate call for this.
                # Responses are cached for SUMMIT_DATA_CACHE_TIME_DAYS since summits don't move.
                summit_data = self.SUMMIT_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"],
                                                         headers=self.HTTP_HEADERS,
                                                         timeout=self.REQUEST_TIMEOUT_SEC).json()
                spot.grid = summit_data["locator"]
                spot.latitude = summit_data["latitude"]
                spot.longitude = summit_data["longitude"]

                # Fill in any missing data
                spot.infer_missing()
                # Add to our list. Don't worry about de-duping, removing old spots etc. at this point; other code will do
                # that for us.
                new_spots.append(spot)

            # Only record the new epoch once the fetch has succeeded, so a transient API failure
            # leaves the old epoch in place and the changed data is retried on the next poll.
            # (Previously the epoch was stored before fetching, which silently dropped data on failure.)
            self.api_epoch = new_epoch
        return new_spots
|
||||
Reference in New Issue
Block a user