Single common URL cache for semi-static lookups #74
core/cache_utils.py (new file, +10)
@@ -0,0 +1,10 @@
+from datetime import timedelta
+
+from requests_cache import CachedSession
+
+# Cache for "semi-static" data such as the locations of parks, CSVs of reference lists, etc.
+# This has an expiry time of 30 days, so will re-request from the source after that amount
+# of time has passed. This is used throughout Spothole to cache data that does not change
+# rapidly.
+SEMI_STATIC_URL_DATA_CACHE = CachedSession("cache/semi_static_url_data_cache",
+                                           expire_after=timedelta(days=30))
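Not part of the diff: a minimal usage sketch, assuming a caller that imports HTTP_HEADERS from core.constants as the changed files below do (the helper function name here is hypothetical). Because requests_cache's CachedSession subclasses requests.Session, callers use the ordinary .get() API and responses come from the on-disk cache until the 30-day expiry lapses.

from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
from core.constants import HTTP_HEADERS

def fetch_semi_static_text(url):
    # Hypothetical helper: returns the response body as text, hitting the
    # network only on a cache miss or after the 30-day expiry.
    return SEMI_STATIC_URL_DATA_CACHE.get(url, headers=HTTP_HEADERS).text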
@@ -9,6 +9,7 @@ from pyhamtools.frequency import freq_to_band
 from pyhamtools.locator import latlong_to_locator
 from requests_cache import CachedSession
 
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.config import config
 from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES, \
     QRZCQ_CALLSIGN_LOOKUP_DATA, HTTP_HEADERS
@@ -34,15 +35,12 @@ class LookupHelper:
         self.CALL_INFO_BASIC = None
         self.LOOKUP_LIB_BASIC = None
         self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = None
-        self.COUNTRY_FILES_CTY_PLIST_CACHE = None
 
     def start(self):
         # Lookup helpers from pyhamtools. We use four (!) of these. The simplest is country-files.com, which downloads the data
         # once on startup, and requires no login/key, but does not have the best coverage.
         # If the user provides login details/API keys, we also set up helpers for QRZ.com, Clublog (live API request), and
         # Clublog (XML download). The lookup functions iterate through these in a sensible order, looking for suitable data.
-        self.COUNTRY_FILES_CTY_PLIST_CACHE = CachedSession("cache/country_files_city_plist_cache",
-                                                           expire_after=timedelta(days=10))
         self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = "cache/cty.plist"
         success = self.download_country_files_cty_plist()
         if success:
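Not part of the diff: the comment above describes iterating through the configured lookup sources in order until one yields suitable data. A sketch of that fallback pattern, assuming each helper exposes a pyhamtools Callinfo-style get_all() call (function and variable names here are illustrative, not Spothole's actual ones):

def lookup_with_fallback(helpers, callsign):
    # Try each configured lookup source in turn; return the first usable result.
    for helper in helpers:
        try:
            result = helper.get_all(callsign)  # pyhamtools Callinfo-style call (assumed)
            if result:
                return result
        except Exception:
            continue  # source unavailable or no data; fall through to the next one
    return None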
@@ -78,7 +76,7 @@ class LookupHelper:
     def download_country_files_cty_plist(self):
         try:
             logging.info("Downloading Country-files.com cty.plist...")
-            response = self.COUNTRY_FILES_CTY_PLIST_CACHE.get("https://www.country-files.com/cty/cty.plist",
+            response = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist",
                                                       headers=HTTP_HEADERS).text
 
             with open(self.COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION, "w") as f:
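Not part of the diff: a self-contained sketch of the download-and-save pattern shown in the hunk above, using the shared 30-day cache. The error handling, return values, and default path here are assumptions for illustration, not the project's actual code.

import logging

from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
from core.constants import HTTP_HEADERS

def download_cty_plist(destination="cache/cty.plist"):
    # Fetch cty.plist through the shared cache and write it to disk.
    try:
        logging.info("Downloading Country-files.com cty.plist...")
        text = SEMI_STATIC_URL_DATA_CACHE.get("https://www.country-files.com/cty/cty.plist",
                                              headers=HTTP_HEADERS).text
        with open(destination, "w") as f:
            f.write(text)
        return True
    except Exception:
        logging.exception("Failed to download cty.plist")
        return False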
@@ -1,9 +1,8 @@
 import csv
-from datetime import timedelta
 
 from pyhamtools.locator import latlong_to_locator
-from requests_cache import CachedSession
 
+from core.cache_utils import SEMI_STATIC_URL_DATA_CACHE
 from core.constants import SIGS, HTTP_HEADERS
 from core.geo_utils import wab_wai_square_to_lat_lon
 
@@ -22,43 +21,38 @@ def get_ref_regex_for_sig(sig):
             return s.ref_regex
     return None
 
-# Cache for SIG ref lookups
-SIG_REF_DATA_CACHE_TIME_DAYS = 30
-SIG_REF_DATA_CACHE = CachedSession("cache/sig_ref_lookup_cache",
-                                   expire_after=timedelta(days=SIG_REF_DATA_CACHE_TIME_DAYS))
-
 # Look up details of a SIG reference (e.g. POTA park) such as name, lat/lon, and grid.
 def get_sig_ref_info(sig, sig_ref_id):
     if sig.upper() == "POTA":
-        data = SIG_REF_DATA_CACHE.get("https://api.pota.app/park/" + sig_ref_id, headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://api.pota.app/park/" + sig_ref_id, headers=HTTP_HEADERS).json()
         if data:
             return {"name": data["name"] if "name" in data else None,
                     "grid": data["grid6"] if "grid6" in data else None,
                     "latitude": data["latitude"] if "latitude" in data else None,
                     "longitude": data["longitude"] if "longitude" in data else None}
     elif sig.upper() == "SOTA":
-        data = SIG_REF_DATA_CACHE.get("https://api-db2.sota.org.uk/api/summits/" + sig_ref_id, headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://api-db2.sota.org.uk/api/summits/" + sig_ref_id, headers=HTTP_HEADERS).json()
         if data:
             return {"name": data["name"] if "name" in data else None,
                     "grid": data["locator"] if "locator" in data else None,
                     "latitude": data["latitude"] if "latitude" in data else None,
                     "longitude": data["longitude"] if "longitude" in data else None}
     elif sig.upper() == "WWBOTA":
-        data = SIG_REF_DATA_CACHE.get("https://api.wwbota.org/bunkers/" + sig_ref_id, headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://api.wwbota.org/bunkers/" + sig_ref_id, headers=HTTP_HEADERS).json()
         if data:
             return {"name": data["name"] if "name" in data else None,
                     "grid": data["locator"] if "locator" in data else None,
                     "latitude": data["lat"] if "lat" in data else None,
                     "longitude": data["long"] if "long" in data else None}
     elif sig.upper() == "GMA" or sig.upper() == "ARLHS" or sig.upper() == "ILLW" or sig.upper() == "WCA" or sig.upper() == "MOTA" or sig.upper() == "IOTA":
-        data = SIG_REF_DATA_CACHE.get("https://www.cqgma.org/api/ref/?" + sig_ref_id, headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://www.cqgma.org/api/ref/?" + sig_ref_id, headers=HTTP_HEADERS).json()
         if data:
             return {"name": data["name"] if "name" in data else None,
                     "grid": data["locator"] if "locator" in data else None,
                     "latitude": data["latitude"] if "latitude" in data else None,
                     "longitude": data["longitude"] if "longitude" in data else None}
     elif sig.upper() == "SIOTA":
-        siota_csv_data = SIG_REF_DATA_CACHE.get("https://www.silosontheair.com/data/silos.csv", headers=HTTP_HEADERS)
+        siota_csv_data = SEMI_STATIC_URL_DATA_CACHE.get("https://www.silosontheair.com/data/silos.csv", headers=HTTP_HEADERS)
         siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
         for row in siota_dr:
             if row["SILO_CODE"] == sig_ref_id:
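Not part of the diff: an illustrative call showing the shape of the dict returned by get_sig_ref_info (the reference ID below is made up). Repeated lookups of the same reference within 30 days are now served from the shared cache rather than the network.

info = get_sig_ref_info("POTA", "GB-0001")  # hypothetical park reference
if info:
    # Returned shape: {"name": ..., "grid": ..., "latitude": ..., "longitude": ...}
    print(info["name"], info["grid"], info["latitude"], info["longitude"])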
@@ -67,7 +61,7 @@ def get_sig_ref_info(sig, sig_ref_id):
                     "latitude": float(row["LAT"]) if "LAT" in row else None,
                     "longitude": float(row["LNG"]) if "LNG" in row else None}
     elif sig.upper() == "WOTA":
-        data = SIG_REF_DATA_CACHE.get("https://www.wota.org.uk/mapping/data/summits.json", headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://www.wota.org.uk/mapping/data/summits.json", headers=HTTP_HEADERS).json()
         if data:
             for feature in data["features"]:
                 if feature["properties"]["wotaId"] == sig_ref_id:
@@ -76,7 +70,7 @@ def get_sig_ref_info(sig, sig_ref_id):
                         "latitude": feature["geometry"]["coordinates"][1],
                         "longitude": feature["geometry"]["coordinates"][0]}
     elif sig.upper() == "ZLOTA":
-        data = SIG_REF_DATA_CACHE.get("https://ontheair.nz/assets/assets.json", headers=HTTP_HEADERS).json()
+        data = SEMI_STATIC_URL_DATA_CACHE.get("https://ontheair.nz/assets/assets.json", headers=HTTP_HEADERS).json()
         if data:
             for asset in data:
                 if asset["code"] == sig_ref_id: