Small change to the interface between parser and calling scripts
Move the "samples" stat out of the ping drop group of stats and into a new general stats group. This way, it will make more sense if/when additional stat groups are added, since that stat will apply to all of them.
parent 96b7634a3d
commit 0ee39f61fd

4 changed files with 70 additions and 54 deletions
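For callers, the practical effect of this change is that history_ping_field_names() now returns three lists and history_ping_stats() returns three dicts, or (None, None, None) on failure, with the "samples" stat moved into the new general group. A rough sketch of the updated calling convention, assuming the gRPC-backed starlink_grpc module is importable and a dish is reachable; the 3600-sample count and the CSV line are illustrative, not copied from the scripts:

    import sys

    import starlink_grpc

    # Field names now come back as three lists: general stats, ping drop
    # stats, and ping drop run length stats.
    g_fields, pd_fields, rl_fields = starlink_grpc.history_ping_field_names()

    # Stats come back as three matching dicts. On failure the tuple is
    # (None, None, None), so checking the general group alone is sufficient.
    g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(3600, verbose=False)
    if g_stats is None:
        sys.exit(1)

    # The "samples" stat now lives in the general group rather than the
    # ping drop group.
    print("Parsed samples: " + str(g_stats["samples"]))
    print("Initial drop run fragment: " + str(rl_stats["init_run_fragment"]))

    # Build one CSV row from the general and ping drop groups, the same way
    # the parser scripts below do.
    csv_data = [str(g_stats[field]) for field in g_fields]
    csv_data.extend(str(pd_stats[field]) for field in pd_fields)
    print(",".join(csv_data))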
@@ -62,11 +62,12 @@ if print_usage or arg_error:
     print(" -H: print CSV header instead of parsing file")
     sys.exit(1 if arg_error else 0)

-fields, rl_fields = starlink_grpc.history_ping_field_names()
+g_fields, pd_fields, rl_fields = starlink_grpc.history_ping_field_names()

 if print_header:
     header = ["datetimestamp_utc"]
-    header.extend(fields)
+    header.extend(g_fields)
+    header.extend(pd_fields)
     if run_lengths:
         for field in rl_fields:
             if field.startswith("run_"):
@@ -78,23 +79,23 @@ if print_header:

 timestamp = datetime.datetime.utcnow()

-stats, rl_stats = starlink_grpc.history_ping_stats(-1 if parse_all else samples,
+g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(-1 if parse_all else samples,
                                                     verbose)

-if stats is None or rl_stats is None:
+if g_stats is None:
     # verbose output already happened, so just bail.
     sys.exit(1)

 if verbose:
-    print("Parsed samples: " + str(stats["samples"]))
-    print("Total ping drop: " + str(stats["total_ping_drop"]))
-    print("Count of drop == 1: " + str(stats["count_full_ping_drop"]))
-    print("Obstructed: " + str(stats["count_obstructed"]))
-    print("Obstructed ping drop: " + str(stats["total_obstructed_ping_drop"]))
-    print("Obstructed drop == 1: " + str(stats["count_full_obstructed_ping_drop"]))
-    print("Unscheduled: " + str(stats["count_unscheduled"]))
-    print("Unscheduled ping drop: " + str(stats["total_unscheduled_ping_drop"]))
-    print("Unscheduled drop == 1: " + str(stats["count_full_unscheduled_ping_drop"]))
+    print("Parsed samples: " + str(g_stats["samples"]))
+    print("Total ping drop: " + str(pd_stats["total_ping_drop"]))
+    print("Count of drop == 1: " + str(pd_stats["count_full_ping_drop"]))
+    print("Obstructed: " + str(pd_stats["count_obstructed"]))
+    print("Obstructed ping drop: " + str(pd_stats["total_obstructed_ping_drop"]))
+    print("Obstructed drop == 1: " + str(pd_stats["count_full_obstructed_ping_drop"]))
+    print("Unscheduled: " + str(pd_stats["count_unscheduled"]))
+    print("Unscheduled ping drop: " + str(pd_stats["total_unscheduled_ping_drop"]))
+    print("Unscheduled drop == 1: " + str(pd_stats["count_full_unscheduled_ping_drop"]))
     if run_lengths:
         print("Initial drop run fragment: " + str(rl_stats["init_run_fragment"]))
         print("Final drop run fragment: " + str(rl_stats["final_run_fragment"]))
@@ -102,7 +103,8 @@ if verbose:
         print("Per-minute drop runs: " + ", ".join(str(x) for x in rl_stats["run_minutes"]))
 else:
     csv_data = [timestamp.replace(microsecond=0).isoformat()]
-    csv_data.extend(str(stats[field]) for field in fields)
+    csv_data.extend(str(g_stats[field]) for field in g_fields)
+    csv_data.extend(str(pd_stats[field]) for field in pd_fields)
     if run_lengths:
         for field in rl_fields:
             if field.startswith("run_"):

@@ -66,11 +66,12 @@ if print_usage or arg_error:
     print(" -H: print CSV header instead of parsing file")
     sys.exit(1 if arg_error else 0)

-fields, rl_fields = starlink_json.history_ping_field_names()
+g_fields, pd_fields, rl_fields = starlink_json.history_ping_field_names()

 if print_header:
     header = ["datetimestamp_utc"]
-    header.extend(fields)
+    header.extend(g_fields)
+    header.extend(pd_fields)
     if run_lengths:
         for field in rl_fields:
             if field.startswith("run_"):
@@ -82,24 +83,24 @@ if print_header:

 timestamp = datetime.datetime.utcnow()

-stats, rl_stats = starlink_json.history_ping_stats(args[0] if args else "-",
+g_stats, pd_stats, rl_stats = starlink_json.history_ping_stats(args[0] if args else "-",
                                                     -1 if parse_all else samples,
                                                     verbose)

-if stats is None or rl_stats is None:
+if g_stats is None:
     # verbose output already happened, so just bail.
     sys.exit(1)

 if verbose:
-    print("Parsed samples: " + str(stats["samples"]))
-    print("Total ping drop: " + str(stats["total_ping_drop"]))
-    print("Count of drop == 1: " + str(stats["count_full_ping_drop"]))
-    print("Obstructed: " + str(stats["count_obstructed"]))
-    print("Obstructed ping drop: " + str(stats["total_obstructed_ping_drop"]))
-    print("Obstructed drop == 1: " + str(stats["count_full_obstructed_ping_drop"]))
-    print("Unscheduled: " + str(stats["count_unscheduled"]))
-    print("Unscheduled ping drop: " + str(stats["total_unscheduled_ping_drop"]))
-    print("Unscheduled drop == 1: " + str(stats["count_full_unscheduled_ping_drop"]))
+    print("Parsed samples: " + str(g_stats["samples"]))
+    print("Total ping drop: " + str(pd_stats["total_ping_drop"]))
+    print("Count of drop == 1: " + str(pd_stats["count_full_ping_drop"]))
+    print("Obstructed: " + str(pd_stats["count_obstructed"]))
+    print("Obstructed ping drop: " + str(pd_stats["total_obstructed_ping_drop"]))
+    print("Obstructed drop == 1: " + str(pd_stats["count_full_obstructed_ping_drop"]))
+    print("Unscheduled: " + str(pd_stats["count_unscheduled"]))
+    print("Unscheduled ping drop: " + str(pd_stats["total_unscheduled_ping_drop"]))
+    print("Unscheduled drop == 1: " + str(pd_stats["count_full_unscheduled_ping_drop"]))
     if run_lengths:
         print("Initial drop run fragment: " + str(rl_stats["init_run_fragment"]))
         print("Final drop run fragment: " + str(rl_stats["final_run_fragment"]))
@@ -107,7 +108,8 @@ if verbose:
         print("Per-minute drop runs: " + ", ".join(str(x) for x in rl_stats["run_minutes"]))
 else:
     csv_data = [timestamp.replace(microsecond=0).isoformat()]
-    csv_data.extend(str(stats[field]) for field in fields)
+    csv_data.extend(str(g_stats[field]) for field in g_fields)
+    csv_data.extend(str(pd_stats[field]) for field in pd_fields)
     if run_lengths:
         for field in rl_fields:
             if field.startswith("run_"):

@@ -4,13 +4,17 @@ This module may eventually contain more expansive parsing logic, but for now
 it contains functions to parse the history data for some specific packet loss
 statistics.

-General ping drop (packet loss) statistics:
-    This group of statistics characterize the packet loss (labeled "ping drop"
-    in the field names of the Starlink gRPC service protocol) in various ways.
+General statistics:
+    This group of statistics contains data relevant to all the other groups.

     The sample interval is currently 1 second.

     samples: The number of valid samples analyzed.
+
+General ping drop (packet loss) statistics:
+    This group of statistics characterize the packet loss (labeled "ping drop"
+    in the field names of the Starlink gRPC service protocol) in various ways.
+
     total_ping_drop: The total amount of time, in sample intervals, that
         experienced ping drop.
     count_full_ping_drop: The number of samples that experienced 100%
@@ -62,13 +66,13 @@ Ping drop run length statistics:

     No sample should be counted in more than one of the run length stats or
     stat elements, so the total of all of them should be equal to
-    count_full_ping_drop from the general stats.
+    count_full_ping_drop from the ping drop stats.

     Samples that experience less than 100% ping drop are not counted in this
     group of stats, even if they happen at the beginning or end of a run of
     100% ping drop samples. To compute the amount of time that experienced
     ping loss in less than a single run of 100% ping drop, use
-    (total_ping_drop - count_full_ping_drop) from the general stats.
+    (total_ping_drop - count_full_ping_drop) from the ping drop stats.
 """

 from itertools import chain
@@ -82,11 +86,13 @@ def history_ping_field_names():
     """Return the field names of the packet loss stats.

     Returns:
-        A tuple with 2 lists, the first with general stat names and the
-        second with ping drop run length stat names.
+        A tuple with 3 lists, the first with general stat names, the second
+        with ping drop stat names, and the third with ping drop run length
+        stat names.
     """
     return [
-        "samples",
+        "samples"
+    ], [
         "total_ping_drop",
         "count_full_ping_drop",
         "count_obstructed",
@@ -122,11 +128,12 @@ def history_ping_stats(parse_samples, verbose=False):
         verbose (bool): Optionally produce verbose output.

     Returns:
-        On success, a tuple with 2 dicts, the first mapping general stat names
-        to their values and the second mapping ping drop run length stat names
-        to their values.
+        On success, a tuple with 3 dicts, the first mapping general stat names
+        to their values, the second mapping ping drop stat names to their
+        values and the third mapping ping drop run length stat names to their
+        values.

-        On failure, the tuple (None, None).
+        On failure, the tuple (None, None, None).
     """
     try:
         history = get_history()
@@ -134,7 +141,7 @@ def history_ping_stats(parse_samples, verbose=False):
         if verbose:
             # RpcError is too verbose to print the details.
             print("Failed getting history")
-        return None, None
+        return None, None, None

     # 'current' is the count of data samples written to the ring buffer,
     # irrespective of buffer wrap.
@@ -218,7 +225,8 @@ def history_ping_stats(parse_samples, verbose=False):
                 run_length = 0

     return {
-        "samples": parse_samples,
+        "samples": parse_samples
+    }, {
         "total_ping_drop": tot,
         "count_full_ping_drop": count_full_drop,
         "count_obstructed": count_obstruct,

@@ -18,11 +18,13 @@ def history_ping_field_names():
     """Return the field names of the packet loss stats.

     Returns:
-        A tuple with 2 lists, the first with general stat names and the
-        second with ping drop run length stat names.
+        A tuple with 3 lists, the first with general stat names, the second
+        with ping drop stat names, and the third with ping drop run length
+        stat names.
     """
     return [
-        "samples",
+        "samples"
+    ], [
         "total_ping_drop",
         "count_full_ping_drop",
         "count_obstructed",
@@ -66,18 +68,19 @@ def history_ping_stats(filename, parse_samples, verbose=False):
         verbose (bool): Optionally produce verbose output.

     Returns:
-        On success, a tuple with 2 dicts, the first mapping general stat names
-        to their values and the second mapping ping drop run length stat names
-        to their values.
+        On success, a tuple with 3 dicts, the first mapping general stat names
+        to their values, the second mapping ping drop stat names to their
+        values and the third mapping ping drop run length stat names to their
+        values.

-        On failure, the tuple (None, None).
+        On failure, the tuple (None, None, None).
     """
     try:
         history = get_history(filename)
     except Exception as e:
         if verbose:
             print("Failed getting history: " + str(e))
-        return None, None
+        return None, None, None

     # "current" is the count of data samples written to the ring buffer,
     # irrespective of buffer wrap.
@@ -161,7 +164,8 @@ def history_ping_stats(filename, parse_samples, verbose=False):
                 run_length = 0

     return {
-        "samples": parse_samples,
+        "samples": parse_samples
+    }, {
         "total_ping_drop": tot,
         "count_full_ping_drop": count_full_drop,
         "count_obstructed": count_obstruct,