mirror of
https://git.ianrenton.com/ian/spothole.git
synced 2026-03-15 12:24:29 +00:00
Bulk convert comments above classes/functions/methods into proper docstrings
This commit is contained in:
@@ -14,8 +14,9 @@ SSE_HANDLER_MAX_QUEUE_SIZE = 1000
|
||||
# How often each SSE handler checks its outgoing spot queue.
# NOTE(review): 5000 presumably means milliseconds (Tornado PeriodicCallback
# convention) — confirm against the callback setup, which is not visible here.
SSE_HANDLER_QUEUE_CHECK_INTERVAL = 5000
|
||||
|
||||
|
||||
# API request handler for /api/v1/spots
|
||||
class APISpotsHandler(tornado.web.RequestHandler):
|
||||
"""API request handler for /api/v1/spots"""
|
||||
|
||||
def initialize(self, spots, web_server_metrics):
    """Stash the shared spot list and metrics dict on this handler.

    Tornado calls this once per request with the arguments supplied when
    the handler was registered with the application.
    """
    # Keep references only; the request methods read these later.
    self.web_server_metrics = web_server_metrics
    self.spots = spots
|
||||
@@ -48,19 +49,22 @@ class APISpotsHandler(tornado.web.RequestHandler):
|
||||
self.set_header("Content-Type", "application/json")
|
||||
|
||||
|
||||
# API request handler for /api/v1/spots/stream
|
||||
class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
|
||||
"""API request handler for /api/v1/spots/stream"""
|
||||
|
||||
def initialize(self, sse_spot_queues, web_server_metrics):
    """Stash the shared SSE queue list and metrics dict on this handler.

    Tornado calls this once per connection with the arguments supplied when
    the handler was registered with the application.
    """
    # Keep references only; the stream lifecycle methods use these later.
    self.web_server_metrics = web_server_metrics
    self.sse_spot_queues = sse_spot_queues
|
||||
|
||||
# Custom headers to avoid e.g. nginx reverse proxy from buffering SSE data
|
||||
def custom_headers(self):
    """Return extra response headers for the SSE stream.

    These stop intermediaries (e.g. an nginx reverse proxy) from caching
    or buffering the event stream, which would delay delivery of spots.
    """
    headers = {}
    # Never cache the live stream.
    headers["Cache-Control"] = "no-store"
    # nginx-specific: disable proxy buffering for this response.
    headers["X-Accel-Buffering"] = "no"
    return headers
|
||||
|
||||
# Called once on the client opening a connection, set things up
|
||||
def open(self):
|
||||
"""Called once on the client opening a connection, set things up"""
|
||||
|
||||
try:
|
||||
# Metrics
|
||||
self.web_server_metrics["last_api_access_time"] = datetime.now(pytz.UTC)
|
||||
@@ -83,8 +87,9 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
|
||||
except Exception as e:
|
||||
logging.warn("Exception when serving SSE socket", e)
|
||||
|
||||
# When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it
|
||||
def close(self):
|
||||
"""When the user closes the socket, empty our queue and remove it from the list so the server no longer fills it"""
|
||||
|
||||
try:
|
||||
if self.spot_queue in self.sse_spot_queues:
|
||||
self.sse_spot_queues.remove(self.spot_queue)
|
||||
@@ -98,8 +103,9 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
|
||||
self.spot_queue = None
|
||||
super().close()
|
||||
|
||||
# Callback to check if anything has arrived in the queue, and if so send it to the client
|
||||
def _callback(self):
|
||||
"""Callback to check if anything has arrived in the queue, and if so send it to the client"""
|
||||
|
||||
try:
|
||||
if self.spot_queue:
|
||||
while not self.spot_queue.empty():
|
||||
@@ -116,10 +122,10 @@ class APISpotsStreamHandler(tornado_eventsource.handler.EventSourceHandler):
|
||||
self.close()
|
||||
|
||||
|
||||
|
||||
# Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in
|
||||
# the main "spots" GET call.
|
||||
def get_spot_list_with_filters(all_spots, query):
|
||||
"""Utility method to apply filters to the overall spot list and return only a subset. Enables query parameters in
|
||||
the main "spots" GET call."""
|
||||
|
||||
# Create a shallow copy of the spot list, ordered by spot time, then filter the list to reduce it only to spots
|
||||
# that match the filter parameters in the query string. Finally, apply a limit to the number of spots returned.
|
||||
# The list of query string filters is defined in the API docs.
|
||||
@@ -142,22 +148,24 @@ def get_spot_list_with_filters(all_spots, query):
|
||||
# duplicates are fine in the main spot list (e.g. different cluster spots of the same DX) this doesn't
|
||||
# work well for the other views.
|
||||
if "dedupe" in query.keys():
|
||||
dedupe = query.get("dedupe").upper() == "TRUE"
|
||||
if dedupe:
|
||||
spots_temp = []
|
||||
already_seen = []
|
||||
for s in spots:
|
||||
call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
|
||||
if call_plus_ssid not in already_seen:
|
||||
spots_temp.append(s)
|
||||
already_seen.append(call_plus_ssid)
|
||||
spots = spots_temp
|
||||
dedupe = query.get("dedupe").upper() == "TRUE"
|
||||
if dedupe:
|
||||
spots_temp = []
|
||||
already_seen = []
|
||||
for s in spots:
|
||||
call_plus_ssid = s.dx_call + (s.dx_ssid if s.dx_ssid else "")
|
||||
if call_plus_ssid not in already_seen:
|
||||
spots_temp.append(s)
|
||||
already_seen.append(call_plus_ssid)
|
||||
spots = spots_temp
|
||||
|
||||
return spots
|
||||
|
||||
# Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
|
||||
# of query parameters and their function is defined in the API docs.
|
||||
|
||||
def spot_allowed_by_query(spot, query):
|
||||
"""Given URL query params and a spot, figure out if the spot "passes" the requested filters or is rejected. The list
|
||||
of query parameters and their function is defined in the API docs."""
|
||||
|
||||
for k in query.keys():
|
||||
match k:
|
||||
case "since":
|
||||
@@ -240,4 +248,4 @@ def spot_allowed_by_query(spot, query):
|
||||
needs_good_location = query.get(k).upper() == "TRUE"
|
||||
if needs_good_location and not spot.dx_location_good:
|
||||
return False
|
||||
return True
|
||||
return True
|
||||
|
||||
Reference in New Issue
Block a user