import json
import logging
import re
from datetime import datetime

import pytz
import tornado
from pyhamtools.locator import locator_to_latlong

from core.constants import SIGS
from core.geo_utils import lat_lon_for_grid_sw_corner_plus_size, lat_lon_to_cq_zone, lat_lon_to_itu_zone
from core.prometheus_metrics_handler import api_requests_counter
from core.sig_utils import get_ref_regex_for_sig, populate_sig_ref_info
from core.utils import serialize_everything
from data.sig_ref import SIGRef
from data.spot import Spot


# API request handler for /api/v1/lookup/call
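# Illustrative request/response (the example callsign is hypothetical, not taken from this file):
#   GET /api/v1/lookup/call?call=M0ABC
#   -> JSON object with "call", "name", "qth", "country", "flag", "continent", "dxcc_id",
#      "cq_zone", "itu_zone", "grid", "latitude", "longitude" and "location_source",
#      or a 422 status with an error string if the callsign is missing or malformed.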
class APILookupCallHandler(tornado.web.RequestHandler):
    def initialize(self, web_server_metrics):
        self.web_server_metrics = web_server_metrics

    def get(self):
        try:
            # Metrics
            self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
            self.web_server_metrics["api_access_counter"] += 1
            self.web_server_metrics["status"] = "OK"
            api_requests_counter.inc()

            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # The "call" query param must exist and look like a callsign
            if "call" in query_params.keys():
                call = query_params.get("call").upper()
                if re.match(r"^[A-Z0-9/\-]*$", call):
                    # Take the callsign, make a "fake spot" so we can run infer_missing() on it, then repack the
                    # resulting data in the correct way for the API response.
                    fake_spot = Spot(dx_call=call)
                    fake_spot.infer_missing()
                    data = {
                        "call": call,
                        "name": fake_spot.dx_name,
                        "qth": fake_spot.dx_qth,
                        "country": fake_spot.dx_country,
                        "flag": fake_spot.dx_flag,
                        "continent": fake_spot.dx_continent,
                        "dxcc_id": fake_spot.dx_dxcc_id,
                        "cq_zone": fake_spot.dx_cq_zone,
                        "itu_zone": fake_spot.dx_itu_zone,
                        "grid": fake_spot.dx_grid,
                        "latitude": fake_spot.dx_latitude,
                        "longitude": fake_spot.dx_longitude,
                        "location_source": fake_spot.dx_location_source
                    }
                    self.write(json.dumps(data, default=serialize_everything))

                else:
                    self.write(json.dumps("Error - '" + call + "' does not look like a valid callsign.",
                                          default=serialize_everything))
                    self.set_status(422)
            else:
                self.write(json.dumps("Error - call must be provided", default=serialize_everything))
                self.set_status(422)

        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)

        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")


# API request handler for /api/v1/lookup/sigref
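# Illustrative request/response (the example SIG and reference are hypothetical, not taken from this file):
#   GET /api/v1/lookup/sigref?sig=POTA&id=GB-0001
#   -> JSON-serialised SIG reference data from populate_sig_ref_info(), or a 422 status with
#      an error string if the parameters are missing, the SIG is unknown, or the reference ID
#      does not match that SIG's reference regex.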
class APILookupSIGRefHandler(tornado.web.RequestHandler):
    def initialize(self, web_server_metrics):
        self.web_server_metrics = web_server_metrics

    def get(self):
        try:
            # Metrics
            self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
            self.web_server_metrics["api_access_counter"] += 1
            self.web_server_metrics["status"] = "OK"
            api_requests_counter.inc()

            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # "sig" and "id" query params must exist, SIG must be known, and if we have a reference regex for that SIG,
            # the provided id must match it.
            if "sig" in query_params.keys() and "id" in query_params.keys():
                sig = query_params.get("sig").upper()
                id = query_params.get("id").upper()
                if sig in list(map(lambda p: p.name, SIGS)):
                    if not get_ref_regex_for_sig(sig) or re.match(get_ref_regex_for_sig(sig), id):
                        data = populate_sig_ref_info(SIGRef(id=id, sig=sig))
                        self.write(json.dumps(data, default=serialize_everything))

                    else:
                        self.write(
                            json.dumps("Error - '" + id + "' does not look like a valid reference ID for " + sig + ".",
                                       default=serialize_everything))
                        self.set_status(422)
                else:
                    self.write(json.dumps("Error - sig '" + sig + "' is not known.", default=serialize_everything))
                    self.set_status(422)
            else:
                self.write(json.dumps("Error - sig and id must be provided", default=serialize_everything))
                self.set_status(422)

        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)

        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")


# API request handler for /api/v1/lookup/grid
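# Illustrative request/response (the example locator is hypothetical, not taken from this file):
#   GET /api/v1/lookup/grid?grid=IO91
#   -> JSON object with "center" (latitude, longitude, cq_zone, itu_zone) plus "southwest" and
#      "northeast" corner coordinates of the grid square, or a 422 status with an error string
#      if no grid is supplied.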
class APILookupGridHandler(tornado.web.RequestHandler):
    def initialize(self, web_server_metrics):
        self.web_server_metrics = web_server_metrics

    def get(self):
        try:
            # Metrics
            self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
            self.web_server_metrics["api_access_counter"] += 1
            self.web_server_metrics["status"] = "OK"
            api_requests_counter.inc()

            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # "grid" query param must exist.
            if "grid" in query_params.keys():
                grid = query_params.get("grid").upper()
                lat, lon, lat_cell_size, lon_cell_size = lat_lon_for_grid_sw_corner_plus_size(grid)
                if lat is not None and lon is not None and lat_cell_size is not None and lon_cell_size is not None:
                    center_lat = lat + lat_cell_size / 2.0
                    center_lon = lon + lon_cell_size / 2.0
                    center_cq_zone = lat_lon_to_cq_zone(center_lat, center_lon)
                    center_itu_zone = lat_lon_to_itu_zone(center_lat, center_lon)

                    response = {
                        "center": {
                            "latitude": center_lat,
                            "longitude": center_lon,
                            "cq_zone": center_cq_zone,
                            "itu_zone": center_itu_zone
                        },
                        "southwest": {
                            "latitude": lat,
                            "longitude": lon
                        },
                        "northeast": {
                            "latitude": lat + lat_cell_size,
                            "longitude": lon + lon_cell_size
                        }
                    }
                    self.write(json.dumps(response, default=serialize_everything))

            else:
                self.write(json.dumps("Error - grid must be provided", default=serialize_everything))
                self.set_status(422)

        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)

        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")
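

# Minimal sketch of how these handlers could be wired into a Tornado application
# (not part of this module; the route patterns follow the endpoint comments above, while the
# port, metrics dict initialisation and entry point are assumptions for illustration only):
#
#   web_server_metrics = {"last_api_access_time": None, "api_access_counter": 0, "status": "OK"}
#   app = tornado.web.Application([
#       (r"/api/v1/lookup/call", APILookupCallHandler, dict(web_server_metrics=web_server_metrics)),
#       (r"/api/v1/lookup/sigref", APILookupSIGRefHandler, dict(web_server_metrics=web_server_metrics)),
#       (r"/api/v1/lookup/grid", APILookupGridHandler, dict(web_server_metrics=web_server_metrics)),
#   ])
#   app.listen(8080)
#   tornado.ioloop.IOLoop.current().start()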