starlink-grpc-tools/starlink_json.py

"""Parser for JSON format gRPC output from a Starlink user terminal.
Expects input as from grpcurl get_history request.
Handling output for other request responses may be added in the future, but
the others don't really need as much interpretation as the get_history
response does.
See the starlink_grpc module docstring for descriptions of the stat elements.
"""
import json
import sys
from itertools import chain


class JsonError(Exception):
    """Provides error info when something went wrong with JSON parsing."""


def history_ping_field_names():
    """Return the field names of the packet loss stats.

    Returns:
        A tuple with 3 lists, the first with general stat names, the second
        with ping drop stat names, and the third with ping drop run length
        stat names.
    """
    return [
"samples",
], [
"total_ping_drop",
"count_full_ping_drop",
"count_obstructed",
"total_obstructed_ping_drop",
"count_full_obstructed_ping_drop",
"count_unscheduled",
"total_unscheduled_ping_drop",
"count_full_unscheduled_ping_drop",
], [
"init_run_fragment",
"final_run_fragment",
"run_seconds",
"run_minutes",
]
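
# A usage sketch (illustrative, not part of the original module): the three
# name lists correspond one-to-one with the three dicts returned by
# history_ping_stats below, so they can be used to walk the stats in a
# stable order:
#
#     general, ping, run_len = history_ping_field_names()
#     g_stats, pd_stats, rl_stats = history_ping_stats("-", -1)
#     merged = {**g_stats, **pd_stats, **rl_stats}
#     for name in chain(general, ping, run_len):
#         print(name, merged[name])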


def get_history(filename):
    """Read JSON data and return the raw history in dict format.

    Args:
        filename (str): Filename from which to read JSON data, or "-" to read
            from standard input.

    Raises:
        Various exceptions depending on Python version: Failure to open or
        read input or invalid JSON read on input.
    """
    if filename == "-":
        json_data = json.load(sys.stdin)
    else:
        with open(filename) as json_file:
            json_data = json.load(json_file)
    return json_data["dishGetHistory"]
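
# For reference, the parsed input must carry at least the fields used below
# under the "dishGetHistory" key; this abridged shape is an illustration,
# with made-up values (note grpcurl renders uint64 counters as strings,
# hence the int() conversion below):
#
#     {"dishGetHistory": {"current": "12345",
#                         "popPingDropRate": [0.0, 1.0, ...],
#                         "scheduled": [true, ...],
#                         "obstructed": [false, ...]}}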


def history_ping_stats(filename, parse_samples, verbose=False):
    """Fetch, parse, and compute the packet loss stats.

    Args:
        filename (str): Filename from which to read JSON data, or "-" to read
            from standard input.
        parse_samples (int): Number of samples to process, or -1 to parse all
            available samples.
        verbose (bool): Optionally produce verbose output.

    Returns:
        A tuple with 3 dicts, the first mapping general stat names to their
        values, the second mapping ping drop stat names to their values, and
        the third mapping ping drop run length stat names to their values.

    Raises:
        JsonError: Failure to open, read, or parse JSON on input.
    """
    try:
        history = get_history(filename)
    except ValueError as e:
        raise JsonError("Failed to parse JSON: " + str(e))
    except Exception as e:
        raise JsonError(e)
# "current" is the count of data samples written to the ring buffer,
# irrespective of buffer wrap.
current = int(history["current"])
samples = len(history["popPingDropRate"])
if verbose:
print("current counter: " + str(current))
print("All samples: " + str(samples))
samples = min(samples, current)
if verbose:
print("Valid samples: " + str(samples))
# This is ring buffer offset, so both index to oldest data sample and
# index to next data sample after the newest one.
offset = current % samples
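
    # Worked example (illustrative numbers): if current is 12345 and the
    # buffer holds samples=12000 entries, offset is 345, so index 345 holds
    # the oldest sample and index 344 holds the newest one.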
    # Per-sample accumulators for the drop stats.
    tot = 0.0
    count_full_drop = 0
    count_unsched = 0
    total_unsched_drop = 0.0
    count_full_unsched = 0
    count_obstruct = 0
    total_obstruct_drop = 0.0
    count_full_obstruct = 0

    # Drop run length histograms: second_runs[i] accumulates the total
    # seconds spent in drop runs lasting exactly i+1 seconds; minute_runs
    # buckets longer runs per minute, with the last bucket also catching
    # runs of an hour or more.
    second_runs = [0] * 60
    minute_runs = [0] * 60
    run_length = 0
    init_run_length = None

    if parse_samples < 0 or samples < parse_samples:
        parse_samples = samples

    # Parse the most recent parse_samples-sized set of samples. This will
    # iterate samples in order from oldest to newest.
    if parse_samples <= offset:
        sample_range = range(offset - parse_samples, offset)
    else:
        sample_range = chain(range(samples + offset - parse_samples, samples),
                             range(0, offset))
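
    # Wrap-around example (illustrative numbers, matching the sketch above):
    # with samples=12000, offset=345, and parse_samples=3600, the range
    # wraps, yielding chain(range(8745, 12000), range(0, 345)): the 3600
    # newest samples in oldest-to-newest order.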
    for i in sample_range:
        d = history["popPingDropRate"][i]
        if d >= 1:
            # Clamp, just in case the reported rate ever exceeds 1...
            d = 1
            count_full_drop += 1
            run_length += 1
        elif run_length > 0:
            if init_run_length is None:
                init_run_length = run_length
            else:
                if run_length <= 60:
                    second_runs[run_length - 1] += run_length
                else:
                    minute_runs[min((run_length - 1) // 60 - 1, 59)] += run_length
            run_length = 0
        elif init_run_length is None:
            init_run_length = 0
        if not history["scheduled"][i]:
            count_unsched += 1
            total_unsched_drop += d
            if d >= 1:
                count_full_unsched += 1
        # scheduled=false and obstructed=true do not ever appear to overlap,
        # but in case they do in the future, treat that as just unscheduled
        # in order to avoid double-counting it.
        elif history["obstructed"][i]:
            count_obstruct += 1
            total_obstruct_drop += d
            if d >= 1:
                count_full_obstruct += 1
        tot += d

    # If the entire sample set is one big drop run, it will be both the
    # initial fragment (continued from the prior sample range) and the final
    # one (continued into the next sample range), but to avoid
    # double-reporting, just call it the initial run.
    if init_run_length is None:
        init_run_length = run_length
        run_length = 0
    return {
        "samples": parse_samples,
    }, {
        "total_ping_drop": tot,
        "count_full_ping_drop": count_full_drop,
        "count_obstructed": count_obstruct,
        "total_obstructed_ping_drop": total_obstruct_drop,
        "count_full_obstructed_ping_drop": count_full_obstruct,
        "count_unscheduled": count_unsched,
        "total_unscheduled_ping_drop": total_unsched_drop,
        "count_full_unscheduled_ping_drop": count_full_unsched,
    }, {
        "init_run_fragment": init_run_length,
        "final_run_fragment": run_length,
        "run_seconds": second_runs,
        "run_minutes": minute_runs,
    }
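

# What follows is a usage sketch added for illustration, not part of the
# original module (the real command-line front ends live in the other
# scripts in this project): run the parser against a file named on the
# command line, or against stdin with "-", and dump the three stat dicts.
if __name__ == "__main__":
    src_name = sys.argv[1] if len(sys.argv) > 1 else "-"
    g_stats, pd_stats, rl_stats = history_ping_stats(src_name, -1, verbose=True)
    print("General stats:    " + str(g_stats))
    print("Ping drop stats:  " + str(pd_stats))
    print("Run length stats: " + str(rl_stats))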