Repository: https://git.ianrenton.com/ian/spothole.git

Download cty.plist separately so errors can be better handled
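
In summary, this commit moves the shared HTTP_HEADERS constant into core/constants.py and has the software download cty.plist from country-files.com itself, instead of letting pyhamtools fetch it, so that download failures can be caught and handled. The new download_country_files_cty_plist() helper is defined in the callsign lookup module below, but the line that actually calls it falls outside the hunks shown here, so the wiring below is only a sketch of the intended startup flow; the warning message is illustrative rather than taken from the commit:

    # Sketch only: refresh cty.plist if possible; on failure, keep using the copy
    # written to cache/cty.plist by a previous successful run (if any).
    if not download_country_files_cty_plist():
        logging.warning("Could not refresh cty.plist, falling back to the cached copy")

    LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile", filename=COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION)
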
@@ -9,9 +9,6 @@ from core.constants import SOFTWARE_NAME, SOFTWARE_VERSION
 # Generic alert provider class. Subclasses of this query the individual APIs for alerts.
 class AlertProvider:
 
-    # HTTP headers used for spot providers that use HTTP
-    HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" }
-
     # Constructor
     def __init__(self, provider_config):
         self.name = provider_config["name"]

@@ -7,6 +7,7 @@ import pytz
 import requests
 
 from alertproviders.alert_provider import AlertProvider
+from core.constants import HTTP_HEADERS
 
 
 # Generic alert provider class for providers that request data via HTTP(S). Just for convenience to avoid code
@@ -35,7 +36,7 @@ class HTTPAlertProvider(AlertProvider):
         try:
             # Request data from API
             logging.debug("Polling " + self.name + " alert API...")
-            http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
+            http_response = requests.get(self.url, headers=HTTP_HEADERS)
             # Pass off to the subclass for processing
             new_alerts = self.http_response_to_alerts(http_response)
             # Submit the new alerts for processing. There might not be any alerts for the less popular programs.

@@ -4,6 +4,9 @@ from data.band import Band
 SOFTWARE_NAME = "Spothole by M0TRT"
 SOFTWARE_VERSION = "0.1"
 
+# HTTP headers used for spot providers that use HTTP
+HTTP_HEADERS = {"User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")"}
+
 # Special Interest Groups
 SIGS = ["POTA", "SOTA", "WWFF", "GMA", "WWBOTA", "HEMA", "MOTA", "ARLHS", "SiOTA", "WCA"]
 
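
For illustration: with SOFTWARE_NAME and SOFTWARE_VERSION as defined above, and a server owner callsign of, say, M0ABC (a placeholder; SERVER_OWNER_CALLSIGN actually comes from the server config), the shared header dictionary evaluates to:

    {"User-Agent": "Spothole by M0TRT 0.1 (operated by M0ABC)"}
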
@@ -1,26 +1,46 @@
 import gzip
 import logging
-import urllib.request
+from datetime import timedelta
 
 from diskcache import Cache
 from pyhamtools import LookupLib, Callinfo
 from pyhamtools.exceptions import APIKeyMissingError
 from pyhamtools.frequency import freq_to_band
 from pyhamtools.locator import latlong_to_locator
+from requests_cache import CachedSession
 
 from core.config import config
-from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES, QRZCQ_CALLSIGN_LOOKUP_DATA
+from core.constants import BANDS, UNKNOWN_BAND, CW_MODES, PHONE_MODES, DATA_MODES, ALL_MODES, \
+    QRZCQ_CALLSIGN_LOOKUP_DATA, HTTP_HEADERS
 
-CLUBLOG_API_KEY = config["clublog-api-key"]
+
+# Download the cty.plist file from country-files.com on first startup. The pyhamtools lib can actually download and use
+# this itself, but it's occasionally offline which causes it to throw an error. By downloading it separately, we can
+# catch errors and handle them, falling back to a previous copy of the file in the cache, and we can use the
+# requests_cache library to prevent re-downloading too quickly if the software keeps restarting.
+def download_country_files_cty_plist():
+    try:
+        response = COUNTRY_FILES_CTY_PLIST_CACHE.get("https://www.country-files.com/cty/cty.plist",
+                                                     headers=HTTP_HEADERS).text
+
+        # write to file
+        with open(COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION, "w") as f:
+            f.write(response)
+        return True
+
+    except Exception as e:
+        logging.error("Exception when downloading country-files.com cty.plist: %s", e)
+        return False
 
 # Download the cty.xml (gzipped) file from Clublog on first startup, so we can use it in preference to querying the
 # database live if possible.
 def download_clublog_ctyxml():
     try:
         # Read the file inside the .gz archive located at url
-        with urllib.request.urlopen("https://cdn.clublog.org/cty.php?api=" + CLUBLOG_API_KEY) as response:
-            with gzip.GzipFile(fileobj=response) as uncompressed:
-                file_content = uncompressed.read()
+        response = CLUBLOG_CTY_XML_CACHE.get("https://cdn.clublog.org/cty.php?api=" + CLUBLOG_API_KEY,
+                                             headers=HTTP_HEADERS).raw
+        with gzip.GzipFile(fileobj=response) as uncompressed:
+            file_content = uncompressed.read()
 
         # write to file in binary mode 'wb'
         with open(CLUBLOG_XML_DOWNLOAD_LOCATION, "wb") as f:
@@ -35,12 +55,18 @@ def download_clublog_ctyxml():
 # once on startup, and requires no login/key, but does not have the best coverage.
 # If the user provides login details/API keys, we also set up helpers for QRZ.com, Clublog (live API request), and
 # Clublog (XML download). The lookup functions iterate through these in a sensible order, looking for suitable data.
-LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile")
+COUNTRY_FILES_CTY_PLIST_CACHE = CachedSession("cache/country_files_city_plist_cache", expire_after=timedelta(days=10))
+COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION = "cache/cty.plist"
+LOOKUP_LIB_BASIC = LookupLib(lookuptype="countryfile", filename=COUNTRY_FILES_CTY_PLIST_DOWNLOAD_LOCATION)
 CALL_INFO_BASIC = Callinfo(LOOKUP_LIB_BASIC)
+
 QRZ_AVAILABLE = config["qrz-password"] != ""
 if QRZ_AVAILABLE:
     LOOKUP_LIB_QRZ = LookupLib(lookuptype="qrz", username=config["qrz-username"], pwd=config["qrz-password"])
     QRZ_CALLSIGN_DATA_CACHE = Cache('cache/qrz_callsign_lookup_cache')
+
+CLUBLOG_API_KEY = config["clublog-api-key"]
+CLUBLOG_CTY_XML_CACHE = CachedSession("cache/clublog_cty_xml_cache", expire_after=timedelta(days=10))
 CLUBLOG_API_AVAILABLE = CLUBLOG_API_KEY != ""
 CLUBLOG_XML_DOWNLOAD_LOCATION = "cache/cty.xml"
 if CLUBLOG_API_AVAILABLE:

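The changes above lean on two layers of caching: requests_cache's CachedSession avoids re-downloading cty.plist or cty.xml within the expire_after window (useful if the software keeps restarting), while the file written to cache/cty.plist is the fallback that pyhamtools is pointed at when a fresh download fails. A minimal sketch of the CachedSession behaviour being relied on, using a placeholder cache name:

    from datetime import timedelta
    from requests_cache import CachedSession

    # Responses are stored on disk (SQLite by default) and replayed without a network
    # request until expire_after has elapsed.
    session = CachedSession("cache/demo_cache", expire_after=timedelta(days=10))
    first = session.get("https://www.country-files.com/cty/cty.plist")
    second = session.get("https://www.country-files.com/cty/cty.plist")
    print(first.from_cache, second.from_cache)  # on a cold cache: False True
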
@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
 import pytz
 from requests_cache import CachedSession
 
+from core.constants import HTTP_HEADERS
 from data.spot import Spot
 from spotproviders.http_spot_provider import HTTPSpotProvider
 
@@ -43,7 +44,7 @@ class GMA(HTTPSpotProvider):
 
             # GMA doesn't give what programme (SIG) the reference is for until we separately look it up.
             ref_response = self.REF_INFO_CACHE.get(self.REF_INFO_URL_ROOT + source_spot["REF"],
-                                                   headers=self.HTTP_HEADERS)
+                                                   headers=HTTP_HEADERS)
             # Sometimes this is blank, so handle that
             if ref_response.text is not None and ref_response.text != "":
                 ref_info = ref_response.json()

@@ -1,10 +1,10 @@
 import re
-from datetime import datetime, timedelta
+from datetime import datetime
 
 import pytz
 import requests
-from requests_cache import CachedSession
 
+from core.constants import HTTP_HEADERS
 from data.spot import Spot
 from spotproviders.http_spot_provider import HTTPSpotProvider
 
@@ -33,7 +33,7 @@ class HEMA(HTTPSpotProvider):
         new_spots = []
         # OK, if the spot seed actually changed, now we make the real request for data.
         if spot_seed_changed:
-            source_data = requests.get(self.SPOTS_URL, headers=self.HTTP_HEADERS)
+            source_data = requests.get(self.SPOTS_URL, headers=HTTP_HEADERS)
             source_data_items = source_data.text.split("=")
             # Iterate through source data items.
             for source_spot in source_data_items:

@@ -6,6 +6,7 @@ from time import sleep
 import pytz
 import requests
 
+from core.constants import HTTP_HEADERS
 from spotproviders.spot_provider import SpotProvider
 
 
@@ -35,7 +36,7 @@ class HTTPSpotProvider(SpotProvider):
         try:
             # Request data from API
             logging.debug("Polling " + self.name + " spot API...")
-            http_response = requests.get(self.url, headers=self.HTTP_HEADERS)
+            http_response = requests.get(self.url, headers=HTTP_HEADERS)
             # Pass off to the subclass for processing
             new_spots = self.http_response_to_spots(http_response)
             # Submit the new spots for processing. There might not be any spots for the less popular programs.

@@ -5,6 +5,7 @@ from datetime import datetime, timedelta
 import pytz
 from requests_cache import CachedSession
 
+from core.constants import HTTP_HEADERS
 from data.spot import Spot
 from spotproviders.http_spot_provider import HTTPSpotProvider
 
@@ -58,7 +59,7 @@ class ParksNPeaks(HTTPSpotProvider):
 
             # SiOTA lat/lon/grid lookup
             if spot.sig == "SiOTA":
-                siota_csv_data = self.SIOTA_LIST_CACHE.get(self.SIOTA_LIST_URL, headers=self.HTTP_HEADERS)
+                siota_csv_data = self.SIOTA_LIST_CACHE.get(self.SIOTA_LIST_URL, headers=HTTP_HEADERS)
                 siota_dr = csv.DictReader(siota_csv_data.content.decode().splitlines())
                 for row in siota_dr:
                     if row["SILO_CODE"] == spot.sig_refs[0]:
@@ -69,7 +70,7 @@ class ParksNPeaks(HTTPSpotProvider):
 
             # ZLOTA name/lat/lon lookup
            if spot.sig == "ZLOTA":
-                zlota_data = self.ZLOTA_LIST_CACHE.get(self.ZLOTA_LIST_URL, headers=self.HTTP_HEADERS).json()
+                zlota_data = self.ZLOTA_LIST_CACHE.get(self.ZLOTA_LIST_URL, headers=HTTP_HEADERS).json()
                for asset in zlota_data:
                    if asset["code"] == spot.sig_refs[0]:
                        spot.sig_refs_names = [asset["name"]]

@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
 import requests
 from requests_cache import CachedSession
 
+from core.constants import HTTP_HEADERS
 from data.spot import Spot
 from spotproviders.http_spot_provider import HTTPSpotProvider
 
@@ -34,7 +35,7 @@ class SOTA(HTTPSpotProvider):
         new_spots = []
         # OK, if the epoch actually changed, now we make the real request for data.
         if epoch_changed:
-            source_data = requests.get(self.SPOTS_URL, headers=self.HTTP_HEADERS).json()
+            source_data = requests.get(self.SPOTS_URL, headers=HTTP_HEADERS).json()
             # Iterate through source data
             for source_spot in source_data:
                 # Convert to our spot format
@@ -55,7 +56,7 @@ class SOTA(HTTPSpotProvider):
 
                 # SOTA doesn't give summit lat/lon/grid in the main call, so we need another separate call for this
                 try:
-                    summit_response = self.SUMMIT_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=self.HTTP_HEADERS)
+                    summit_response = self.SUMMIT_DATA_CACHE.get(self.SUMMIT_URL_ROOT + source_spot["summitCode"], headers=HTTP_HEADERS)
                     summit_data = summit_response.json()
                     spot.grid = summit_data["locator"]
                     spot.latitude = summit_data["latitude"]

@@ -9,9 +9,6 @@ from core.config import SERVER_OWNER_CALLSIGN, MAX_SPOT_AGE
 # Generic spot provider class. Subclasses of this query the individual APIs for data.
 class SpotProvider:
 
-    # HTTP headers used for spot providers that use HTTP
-    HTTP_HEADERS = { "User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")" }
-
     # Constructor
     def __init__(self, provider_config):
         self.name = provider_config["name"]

@@ -6,6 +6,7 @@ from time import sleep
 import pytz
 from requests_sse import EventSource
 
+from core.constants import HTTP_HEADERS
 from spotproviders.spot_provider import SpotProvider
 
 # Spot provider using Server-Sent Events.
@@ -37,7 +38,7 @@ class SSESpotProvider(SpotProvider):
         while not self.stopped:
             try:
                 logging.debug("Connecting to " + self.name + " spot API...")
-                with EventSource(self.url, headers=self.HTTP_HEADERS, latest_event_id=self.last_event_id, timeout=30) as event_source:
+                with EventSource(self.url, headers=HTTP_HEADERS, latest_event_id=self.last_event_id, timeout=30) as event_source:
                     self.event_source = event_source
                     for event in self.event_source:
                         if event.type == 'message':