Mirror of https://git.ianrenton.com/ian/spothole.git (synced 2025-10-27 08:49:27 +00:00)
Compare commits
2 Commits
8c2ab61049 ... ae72649df8

| Author | SHA1 | Date |
|---|---|---|
|  | ae72649df8 |  |
|  | b4d88a4770 |  |
alertproviders/parksnpeaks.py (new file, 65 lines)
@@ -0,0 +1,65 @@
+import logging
+from datetime import datetime
+
+import pytz
+
+from alertproviders.http_alert_provider import HTTPAlertProvider
+from data.alert import Alert
+
+
+# Alert provider for Parks n Peaks
+class ParksNPeaks(HTTPAlertProvider):
+    POLL_INTERVAL_SEC = 3600
+    ALERTS_URL = "http://parksnpeaks.org/api/ALERTS/"
+
+    def __init__(self, provider_config):
+        super().__init__(provider_config, self.ALERTS_URL, self.POLL_INTERVAL_SEC)
+
+    def http_response_to_alerts(self, http_response):
+        new_alerts = []
+        # Iterate through source data
+        for source_alert in http_response.json():
+            # Calculate some things
+            if " - " in source_alert["Location"]:
+                split = source_alert["Location"].split(" - ")
+                sig_ref = split[0]
+                sig_ref_name = split[1]
+            else:
+                sig_ref = source_alert["WWFFID"]
+                sig_ref_name = source_alert["Location"]
+            start_time = datetime.strptime(source_alert["alTime"], "%Y-%m-%d %H:%M:%S").replace(
+                tzinfo=pytz.UTC).timestamp()
+
+            # Convert to our alert format
+            alert = Alert(source=self.name,
+                          source_id=source_alert["alID"],
+                          dx_calls=[source_alert["CallSign"].upper()],
+                          freqs_modes=source_alert["Freq"] + " " + source_alert["MODE"],
+                          comment=source_alert["Comments"],
+                          sig=source_alert["Class"],
+                          sig_refs=[sig_ref],
+                          sig_refs_names=[sig_ref_name],
+                          start_time=start_time,
+                          is_dxpedition=False)
+
+            # PNP supports a bunch of programs which should have different icons
+            if alert.sig == "SiOTA":
+                alert.icon = "wheat-awn"
+            elif alert.sig == "ZLOTA":
+                alert.icon = "kiwi-bird"
+            elif alert.sig == "KRMNPA":
+                alert.icon = "earth-oceania"
+            elif alert.sig in ["POTA", "SOTA", "WWFF"]:
+                # Don't care about an icon as this will be rejected anyway, we have better data from POTA/SOTA/WWFF direct
+                alert.icon = ""
+            else:
+                # Unknown programme we've never seen before
+                logging.warning(
+                    "PNP alert found with sig " + alert.sig + ", developer needs to add support for this and set an icon!")
+                alert.icon = "question"
+
+            # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. Otherwise, add to
+            # the alert list.
+            if alert.sig not in ["POTA", "SOTA", "WWFF"]:
+                new_alerts.append(alert)
+        return new_alerts
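To make the parsing rules in the new provider concrete, here is a minimal standalone sketch of how a single Parks n Peaks alert record is interpreted. The field names (`Location`, `WWFFID`, `alTime`) come from the code above; the sample values are invented for illustration.

```python
from datetime import datetime

import pytz

# Hypothetical PNP record; only the fields used by the parsing logic are shown.
source_alert = {"Location": "VKFF-0022 - Some National Park",
                "WWFFID": "VKFF-0022",
                "alTime": "2025-10-27 03:30:00"}

# A "Ref - Name" location is split into a reference and a human-readable name;
# otherwise the WWFF ID is used as the reference and the whole location as the name.
if " - " in source_alert["Location"]:
    split = source_alert["Location"].split(" - ")
    sig_ref, sig_ref_name = split[0], split[1]
else:
    sig_ref, sig_ref_name = source_alert["WWFFID"], source_alert["Location"]

# PNP times are naive "YYYY-MM-DD HH:MM:SS" strings treated as UTC, stored as a Unix timestamp.
start_time = datetime.strptime(source_alert["alTime"], "%Y-%m-%d %H:%M:%S").replace(
    tzinfo=pytz.UTC).timestamp()

print(sig_ref, sig_ref_name, start_time)
```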
@@ -84,6 +84,10 @@ alert-providers:
     class: "WWFF"
     name: "WWFF"
     enabled: true
+  -
+    class: "ParksNPeaks"
+    name: "ParksNPeaks"
+    enabled: true
   -
     class: "NG3K"
     name: "NG3K"
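The new ParksNPeaks entry in the alert-providers list follows the same class/name/enabled shape as the existing providers. The loader that turns these entries into provider objects is not shown in this diff, so the following is only a hypothetical sketch of how such an entry could map onto the new alertproviders/parksnpeaks.py module; the module-naming convention and the helper function are assumptions, not spothole's actual code.

```python
import importlib

def build_alert_provider(entry):
    """Hypothetical: instantiate one alert provider from a config entry such as
    {"class": "ParksNPeaks", "name": "ParksNPeaks", "enabled": True}."""
    if not entry.get("enabled", False):
        return None
    # Assumed convention: class "ParksNPeaks" lives in alertproviders/parksnpeaks.py
    module = importlib.import_module("alertproviders." + entry["class"].lower())
    provider_class = getattr(module, entry["class"])
    return provider_class(entry)
```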
@@ -9,7 +9,7 @@ SOFTWARE_VERSION = "0.1"
 HTTP_HEADERS = {"User-Agent": SOFTWARE_NAME + " " + SOFTWARE_VERSION + " (operated by " + SERVER_OWNER_CALLSIGN + ")"}
 
 # Special Interest Groups
-SIGS = ["POTA", "SOTA", "WWFF", "GMA", "WWBOTA", "HEMA", "MOTA", "ARLHS", "ILLW", "SiOTA", "WCA", "ZLOTA", "IOTA"]
+SIGS = ["POTA", "SOTA", "WWFF", "GMA", "WWBOTA", "HEMA", "MOTA", "ARLHS", "ILLW", "SiOTA", "WCA", "ZLOTA", "IOTA", "KRMNPA"]
 
 # Modes. Note "DIGI" and "DIGITAL" are also supported but are normalised into "DATA".
 CW_MODES = ["CW"]
@@ -1,6 +1,7 @@
 import copy
 import hashlib
 import json
+import re
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 
@@ -105,6 +106,11 @@ class Alert:
         if self.dx_calls and not self.dx_names:
             self.dx_names = list(map(lambda c: lookup_helper.infer_name_from_callsign(c), self.dx_calls))
 
+        # Clean up comments
+        if self.comment:
+            comment = re.sub(r"\(de [A-Za-z0-9]*\)", "", self.comment)
+            self.comment = comment.strip()
+
         # Always create an ID based on a hash of every parameter *except* received_time. This is used as the index
         # to a map, which as a byproduct avoids us having multiple duplicate copies of the object that are identical
         # apart from that they were retrieved from the API at different times. Note that the simple Python hash()
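The new comment clean-up strips spotter attributions of the form "(de CALLSIGN)" before the comment is stored. A quick standalone check of the regular expression used above; the sample comment text and callsign are invented:

```python
import re

comment = "QRV 20m SSB then CW (de VK3ABC)"
# Remove the "(de ...)" attribution, then trim the leftover whitespace
cleaned = re.sub(r"\(de [A-Za-z0-9]*\)", "", comment).strip()
print(cleaned)  # "QRV 20m SSB then CW"
```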
@@ -31,8 +31,8 @@ class WebServer:
         bottle.BaseTemplate.defaults['software_version'] = SOFTWARE_VERSION
 
         # Routes for API calls
-        bottle.get("/api/v1/spots")(lambda: self.serve_api(self.get_spot_list_with_filters()))
-        bottle.get("/api/v1/alerts")(lambda: self.serve_api(self.get_alert_list_with_filters()))
+        bottle.get("/api/v1/spots")(lambda: self.serve_spots_api())
+        bottle.get("/api/v1/alerts")(lambda: self.serve_alerts_api())
         bottle.get("/api/v1/options")(lambda: self.serve_api(self.get_options()))
         bottle.get("/api/v1/status")(lambda: self.serve_api(self.status_data))
         bottle.post("/api/v1/spot")(lambda: self.accept_spot())
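The route wiring above uses bottle's decorator factories directly: bottle.get(path) returns a decorator, so applying it to a lambda binds the path to an instance method at construction time. A minimal standalone illustration of the same pattern; the Demo class and the /hello route are invented for this example and are not spothole code:

```python
import bottle

class Demo:
    def __init__(self):
        # Same registration style as WebServer: decorator factory applied to a lambda
        bottle.get("/hello")(lambda: self.hello())

    def hello(self):
        # bottle serialises returned dicts as JSON
        return {"message": "hello"}

demo = Demo()
# bottle.run(host="localhost", port=8080)  # uncomment to serve /hello
```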
@@ -56,6 +56,38 @@ class WebServer:
         self.status = "Waiting"
         run(host='localhost', port=self.port)
 
+    # Serve the JSON API /spots endpoint
+    def serve_spots_api(self):
+        try:
+            data = self.get_spot_list_with_filters()
+            return self.serve_api(data)
+        except ValueError as e:
+            logging.error(e)
+            response.content_type = 'application/json'
+            response.status = 400
+            return json.dumps("Bad request - " + str(e), default=serialize_everything)
+        except Exception as e:
+            logging.error(e)
+            response.content_type = 'application/json'
+            response.status = 500
+            return json.dumps("Error - " + str(e), default=serialize_everything)
+
+    # Serve the JSON API /alerts endpoint
+    def serve_alerts_api(self):
+        try:
+            data = self.get_alert_list_with_filters()
+            return self.serve_api(data)
+        except ValueError as e:
+            logging.error(e)
+            response.content_type = 'application/json'
+            response.status = 400
+            return json.dumps("Bad request - " + str(e), default=serialize_everything)
+        except Exception as e:
+            logging.error(e)
+            response.content_type = 'application/json'
+            response.status = 500
+            return json.dumps("Error - " + str(e), default=serialize_everything)
+
     # Serve a JSON API endpoint
     def serve_api(self, data):
         self.last_api_access_time = datetime.now(pytz.UTC)
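serve_spots_api and serve_alerts_api are identical apart from the data source: a ValueError from filter parsing becomes an HTTP 400, anything else becomes a 500, and both are returned as JSON. That shared contract could be expressed as a small helper like the sketch below. This is only an illustration of the pattern, not part of the commit, and it assumes the existing serve_api method and serialize_everything helper are passed in by the caller.

```python
import json
import logging

from bottle import response

def serve_with_errors(supplier, serve_api, serialize_everything):
    """Illustrative only: call supplier(), serve the result, and map failures
    to the same 400/500 JSON responses used by the new endpoint methods."""
    try:
        return serve_api(supplier())
    except ValueError as e:
        # Bad filter parameters from the client
        logging.error(e)
        response.content_type = 'application/json'
        response.status = 400
        return json.dumps("Bad request - " + str(e), default=serialize_everything)
    except Exception as e:
        # Anything else is a server-side failure
        logging.error(e)
        response.content_type = 'application/json'
        response.status = 500
        return json.dumps("Error - " + str(e), default=serialize_everything)
```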
@@ -109,6 +141,7 @@ class WebServer:
 
             response.content_type = 'application/json'
             response.set_header('Cache-Control', 'no-store')
+            response.status = 201
             return json.dumps("OK", default=serialize_everything)
         except Exception as e:
             logging.error(e)
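Accepted spot submissions now return HTTP 201 (Created) rather than the default 200. A hypothetical client-side check against a local spothole instance; the host, port and payload fields here are invented, and the real spot schema is defined elsewhere in the project:

```python
import requests

# Invented example payload and address; adjust to match a real spothole instance.
resp = requests.post("http://localhost:8080/api/v1/spot",
                     json={"dx_calls": ["VK3ABC"], "freqs_modes": "14.310 SSB"})
print(resp.status_code)  # 201 expected for an accepted spot after this change
```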
@@ -43,11 +43,17 @@ class ParksNPeaks(HTTPSpotProvider):
                         time=datetime.strptime(source_spot["actTime"], "%Y-%m-%d %H:%M:%S").replace(
                             tzinfo=pytz.UTC).timestamp())
 
+            # Free text location is not present in all spots, so only add it if it's set
+            if "actLocation" in source_spot and source_spot["actLocation"] != "":
+                spot.sig_refs_names = [source_spot["actLocation"]]
+
             # PNP supports a bunch of programs which should have different icons
             if spot.sig == "SiOTA":
                 spot.icon = "wheat-awn"
             elif spot.sig == "ZLOTA":
                 spot.icon = "kiwi-bird"
+            elif spot.sig == "KRMNPA":
+                spot.icon = "earth-oceania"
             elif spot.sig in ["POTA", "SOTA", "WWFF"]:
                 # Don't care about an icon as this will be rejected anyway, we have better data from POTA/SOTA/WWFF direct
                 spot.icon = ""
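The same record-level logic can be seen in isolation below: the optional actLocation field only populates sig_refs_names when it is present and non-empty, and KRMNPA spots now get the earth-oceania icon. The source_spot sample and the simplified spot object are invented stand-ins for illustration only.

```python
from types import SimpleNamespace

# Invented sample record; only the fields touched by the new code are shown.
source_spot = {"Class": "KRMNPA", "actLocation": "Some State Park"}
spot = SimpleNamespace(sig=source_spot["Class"], sig_refs_names=[], icon="")

# Free-text location is optional, so it is only copied across when set
if "actLocation" in source_spot and source_spot["actLocation"] != "":
    spot.sig_refs_names = [source_spot["actLocation"]]

# KRMNPA now has its own icon alongside the other PNP programmes
if spot.sig == "KRMNPA":
    spot.icon = "earth-oceania"

print(spot.sig_refs_names, spot.icon)  # ['Some State Park'] earth-oceania
```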
@@ -80,6 +86,8 @@ class ParksNPeaks(HTTPSpotProvider):
                     spot.de_call = None
                     break
 
+            # Note there is currently no support for KRMNPA location lookup, see issue #61.
+
             # If this is POTA, SOTA or WWFF data we already have it through other means, so ignore. Otherwise, add to
             # the spot list.
             if spot.sig not in ["POTA", "SOTA", "WWFF"]: