mirror of
https://git.ianrenton.com/ian/spothole.git
synced 2026-02-04 01:04:33 +00:00
Implement more request handlers in Tornado #3
This commit is contained in:
@@ -1,7 +1,132 @@
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
import tornado
|
||||
|
||||
from core.config import ALLOW_SPOTTING, MAX_SPOT_AGE
|
||||
from core.constants import UNKNOWN_BAND
|
||||
from core.lookup_helper import lookup_helper
|
||||
from core.sig_utils import get_ref_regex_for_sig
|
||||
from core.utils import serialize_everything
|
||||
from data.sig_ref import SIGRef
|
||||
from data.spot import Spot
|
||||
|
||||
|
||||
# API request handler for /api/v1/spot (POST)
class APISpotHandler(tornado.web.RequestHandler):
    """Accepts a new spot, POSTed as JSON, validates it and adds it to the
    server's spot store.

    Responds 201 with "OK" on success; 401 if spotting is disabled, 415 for a
    non-JSON Content-Type, 422 for validation failures, 500 on internal error.
    """

    def initialize(self, spots):
        # Shared spot store injected by the application. Assumed to be a
        # diskcache-style cache supporting add(key, value, expire=...) —
        # TODO confirm against the application setup code.
        self.spots = spots

    def _respond_json(self, status, payload):
        # Every exit path of post() emits a JSON body with the same no-cache
        # and content-type headers; centralise that here.
        self.set_status(status)
        self.write(json.dumps(payload, default=serialize_everything))
        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")

    def post(self):
        try:
            # Reject if this server does not accept spots via the API.
            # NOTE(review): 403 would be the more accurate status than 401,
            # but 401 is kept for backwards compatibility with existing clients.
            if not ALLOW_SPOTTING:
                self._respond_json(401, "Error - this server does not allow new spots to be added via the API.")
                return

            # Reject if format not json. (headers.get() returns None when the
            # header is absent, so a separate presence check is unnecessary.)
            if self.request.headers.get('Content-Type') != "application/json":
                self._respond_json(415, "Error - request Content-Type must be application/json")
                return

            # Reject if request body is empty
            post_data = self.request.body
            if not post_data:
                self._respond_json(422, "Error - request body is empty")
                return

            # Read in the request body as JSON then convert to a Spot object
            json_spot = tornado.escape.json_decode(post_data)
            spot = Spot(**json_spot)

            # Converting to a spot object this way won't have coped with
            # sig_ref objects (they arrive as plain dicts), so rebuild them
            # as real SIGRef instances via a JSON round-trip.
            if spot.sig_refs:
                spot.sig_refs = [json.loads(json.dumps(dict_obj), object_hook=lambda d: SIGRef(**d))
                                 for dict_obj in spot.sig_refs]

            # Reject if no timestamp, frequency, dx_call or de_call
            if not spot.time or not spot.dx_call or not spot.freq or not spot.de_call:
                self._respond_json(422, "Error - 'time', 'dx_call', 'freq' and 'de_call' must be provided as a minimum.")
                return

            # Reject invalid-looking callsigns
            if not re.match(r"^[A-Za-z0-9/\-]*$", spot.dx_call):
                self._respond_json(422, "Error - '" + spot.dx_call + "' does not look like a valid callsign.")
                return
            if not re.match(r"^[A-Za-z0-9/\-]*$", spot.de_call):
                self._respond_json(422, "Error - '" + spot.de_call + "' does not look like a valid callsign.")
                return

            # Reject if frequency not in a known band. (freq is presumably in
            # Hz given the /1000.0 kHz display conversion — TODO confirm.)
            if lookup_helper.infer_band_from_freq(spot.freq) == UNKNOWN_BAND:
                self._respond_json(422, "Error - Frequency of " + str(spot.freq / 1000.0) + "kHz is not in a known band.")
                return

            # Reject if grid formatting incorrect: accepts 4/6/8/10-character
            # Maidenhead locators, case-insensitively.
            if spot.dx_grid and not re.match(
                    r"^([A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2}[A-X]{2}[0-9]{2}|[A-R]{2}[0-9]{2}[A-X]{2}|[A-R]{2}[0-9]{2})$",
                    spot.dx_grid.upper()):
                self._respond_json(422, "Error - '" + spot.dx_grid + "' does not look like a valid Maidenhead grid.")
                return

            # Reject if sig_ref format incorrect for sig. Only the first
            # sig_ref is checked, matching the original behaviour.
            ref_regex = get_ref_regex_for_sig(spot.sig) if spot.sig else None
            if spot.sig and spot.sig_refs and len(spot.sig_refs) > 0 and spot.sig_refs[0].id \
                    and ref_regex and not re.match(ref_regex, spot.sig_refs[0].id):
                self._respond_json(
                    422,
                    "Error - '" + spot.sig_refs[0].id + "' does not look like a valid reference for " + spot.sig + ".")
                return

            # infer missing data, and add it to our database.
            spot.source = "API"
            spot.infer_missing()
            self.spots.add(spot.id, spot, expire=MAX_SPOT_AGE)

            self._respond_json(201, "OK")

        except Exception as e:
            logging.error(e)
            self._respond_json(500, "Error - " + str(e))
|
||||
|
||||
@@ -1,19 +1,81 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
import pytz
|
||||
import tornado
|
||||
|
||||
from core.utils import serialize_everything
|
||||
|
||||
|
||||
# API request handler for /api/v1/alerts
class APIAlertsHandler(tornado.web.RequestHandler):
    """Returns the list of alerts matching the request's query-string filters
    as JSON.

    Responds 200 with the alert list, 400 for bad filter values (ValueError,
    e.g. a non-numeric "limit"), 500 on internal error.
    """

    def initialize(self, alerts):
        # Shared alert store injected by the application.
        self.alerts = alerts

    def get(self):
        try:
            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # Fetch all alerts matching the query
            data = get_alert_list_with_filters(self.alerts, query_params)
            self.write(json.dumps(data, default=serialize_everything))
            self.set_status(200)
        except ValueError as e:
            # Invalid client-supplied filter values surface as ValueError and
            # are reported as a 400 rather than a server fault.
            logging.error(e)
            self.write(json.dumps("Bad request - " + str(e), default=serialize_everything))
            self.set_status(400)
        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)
        # Headers are set once here, on every path.
        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")
|
||||
|
||||
# API request handler for /api/v1/alerts/stream
class APIAlertsStreamHandler(tornado.web.RequestHandler):
    """Placeholder for a server-sent-events stream of alerts.

    Currently a stub that just responds "Hello, world". The commented-out
    code below appears to be the previous framework's SSE implementation
    (it references gevent and a queue-per-client model), kept as a reference
    for the Tornado port — TODO implement.
    """

    def get(self):
        # todo
        self.write("Hello, world")
        # try:
        #     response.content_type = 'text/event-stream'
        #     response.cache_control = 'no-cache'
        #     yield 'retry: 1000\n\n'
        #
        #     alert_queue = Queue(maxsize=100)
        #     self.sse_alert_queues.append(alert_queue)
        #     while True:
        #         if alert_queue.empty():
        #             gevent.sleep(1)
        #         else:
        #             alert = alert_queue.get()
        #             yield 'data: ' + json.dumps(alert, default=serialize_everything) + '\n\n'
        # except Exception as e:
        #     logging.warn("Exception when serving SSE socket", e)
        pass
|
||||
|
||||
|
||||
|
||||
|
||||
# Utility method to apply filters to the overall alert list and return only a
# subset. Enables query parameters in the main "alerts" GET call.
def get_alert_list_with_filters(all_alerts, query):
    """Return alerts from `all_alerts`, ordered by start time and reduced to
    those passing the query-string filters, optionally truncated by a "limit"
    parameter. The list of query string filters is defined in the API docs."""
    # Snapshot the keys up front, then fetch each entry; entries can vanish
    # (expire) between the key listing and the get(), hence the None filter.
    fetched = [all_alerts.get(key) for key in list(all_alerts.iterkeys())]
    candidates = [alert for alert in fetched if alert is not None]
    # Order by start time; alerts without one sort first (treated as 0).
    candidates.sort(key=lambda alert: (alert.start_time if alert and alert.start_time else 0))
    # Keep only alerts that pass the query filters (see API docs).
    result = [alert for alert in candidates if alert_allowed_by_query(alert, query)]
    if "limit" in query:
        result = result[:int(query["limit"])]
    return result
|
||||
|
||||
# Given URL query params and an alert, figure out if the alert "passes" the requested filters or is rejected. The list
|
||||
# of query parameters and their function is defined in the API docs.
|
||||
def alert_allowed_by_query(alert, query):
|
||||
|
||||
@@ -1,13 +1,100 @@
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
import tornado
|
||||
|
||||
from core.constants import SIGS
|
||||
from core.sig_utils import get_ref_regex_for_sig, populate_sig_ref_info
|
||||
from core.utils import serialize_everything
|
||||
from data.sig_ref import SIGRef
|
||||
from data.spot import Spot
|
||||
|
||||
|
||||
# API request handler for /api/v1/lookup/call
class APILookupCallHandler(tornado.web.RequestHandler):
    """Looks up station details (name, location, DXCC/zone data, etc.) for a
    callsign supplied via the "call" query parameter.

    Responds 200 with the lookup result, 422 for a missing or invalid-looking
    callsign, 500 on internal error.
    """

    def get(self):
        try:
            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # The "call" query param must exist and look like a callsign
            if "call" in query_params:
                call = query_params.get("call").upper()
                if re.match(r"^[A-Z0-9/\-]*$", call):
                    # Take the callsign, make a "fake spot" so we can run infer_missing() on it, then repack the
                    # resulting data in the correct way for the API response.
                    fake_spot = Spot(dx_call=call)
                    fake_spot.infer_missing()
                    data = {
                        "call": call,
                        "name": fake_spot.dx_name,
                        "qth": fake_spot.dx_qth,
                        "country": fake_spot.dx_country,
                        "flag": fake_spot.dx_flag,
                        "continent": fake_spot.dx_continent,
                        "dxcc_id": fake_spot.dx_dxcc_id,
                        "cq_zone": fake_spot.dx_cq_zone,
                        "itu_zone": fake_spot.dx_itu_zone,
                        "grid": fake_spot.dx_grid,
                        "latitude": fake_spot.dx_latitude,
                        "longitude": fake_spot.dx_longitude,
                        "location_source": fake_spot.dx_location_source
                    }
                    self.write(json.dumps(data, default=serialize_everything))

                else:
                    self.write(json.dumps("Error - '" + call + "' does not look like a valid callsign.",
                                          default=serialize_everything))
                    self.set_status(422)
            else:
                self.write(json.dumps("Error - call must be provided", default=serialize_everything))
                self.set_status(422)

        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)

        # Headers are set once here, on every path.
        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")
|
||||
|
||||
|
||||
# API request handler for /api/v1/lookup/sigref
class APILookupSIGRefHandler(tornado.web.RequestHandler):
    """Looks up details of a Special Interest Group reference (e.g. a park or
    summit ID) from "sig" and "id" query parameters.

    Responds 200 with the populated reference info, 422 for missing params,
    an unknown SIG or a malformed reference ID, 500 on internal error.
    """

    def get(self):
        try:
            # request.arguments contains lists for each param key because technically the client can supply multiple,
            # reduce that to just the first entry, and convert bytes to string
            query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}

            # "sig" and "id" query params must exist, SIG must be known, and if we have a reference regex for that SIG,
            # the provided id must match it.
            if "sig" in query_params and "id" in query_params:
                sig = query_params.get("sig").upper()
                # Named ref_id rather than id to avoid shadowing the builtin.
                ref_id = query_params.get("id").upper()
                if sig in [p.name for p in SIGS]:
                    # Look the regex up once; it is used both to decide whether
                    # validation applies and to perform it.
                    ref_regex = get_ref_regex_for_sig(sig)
                    if not ref_regex or re.match(ref_regex, ref_id):
                        data = populate_sig_ref_info(SIGRef(id=ref_id, sig=sig))
                        self.write(json.dumps(data, default=serialize_everything))

                    else:
                        self.write(
                            json.dumps("Error - '" + ref_id + "' does not look like a valid reference ID for " + sig + ".",
                                       default=serialize_everything))
                        self.set_status(422)
                else:
                    self.write(json.dumps("Error - sig '" + sig + "' is not known.", default=serialize_everything))
                    self.set_status(422)
            else:
                self.write(json.dumps("Error - sig and id must be provided", default=serialize_everything))
                self.set_status(422)

        except Exception as e:
            logging.error(e)
            self.write(json.dumps("Error - " + str(e), default=serialize_everything))
            self.set_status(500)

        # Headers are set once here, on every path.
        self.set_header("Cache-Control", "no-store")
        self.set_header("Content-Type", "application/json")
|
||||
|
||||
@@ -16,8 +16,9 @@ class APISpotsHandler(tornado.web.RequestHandler):
|
||||
def get(self):
|
||||
try:
|
||||
# request.arguments contains lists for each param key because technically the client can supply multiple,
|
||||
# reduce that to just the first entry
|
||||
query_params = {k: v[0] for k, v in self.request.arguments.items()}
|
||||
# reduce that to just the first entry, and convert bytes to string
|
||||
query_params = {k: v[0].decode("utf-8") for k, v in self.request.arguments.items()}
|
||||
|
||||
# Fetch all spots matching the query
|
||||
data = get_spot_list_with_filters(self.spots, query_params)
|
||||
self.write(json.dumps(data, default=serialize_everything))
|
||||
@@ -38,7 +39,22 @@ class APISpotsHandler(tornado.web.RequestHandler):
|
||||
class APISpotsStreamHandler(tornado.web.RequestHandler):
    """Placeholder for a server-sent-events stream of spots.

    Currently a stub that just responds "Hello, world". The commented-out
    code below appears to be the previous framework's SSE implementation
    (it references gevent and a queue-per-client model), kept as a reference
    for the Tornado port — TODO implement.
    """

    def get(self):
        # todo
        self.write("Hello, world")
        # try:
        #     response.content_type = 'text/event-stream'
        #     response.cache_control = 'no-cache'
        #     yield 'retry: 1000\n\n'
        #
        #     spot_queue = Queue(maxsize=100)
        #     self.sse_spot_queues.append(spot_queue)
        #     while True:
        #         if spot_queue.empty():
        #             gevent.sleep(1)
        #         else:
        #             spot = spot_queue.get()
        #             yield 'data: ' + json.dumps(spot, default=serialize_everything) + '\n\n'
        # except Exception as e:
        #     logging.warn("Exception when serving SSE socket", e)
        pass
|
||||
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user