Fix remaining pylint and yapf nits
This commit is contained in:
parent
46f65a6214
commit
3fafcea882
8 changed files with 42 additions and 36 deletions
|
@@ -152,13 +152,13 @@ def main():
|
||||||
gstate.dish_id = None
|
gstate.dish_id = None
|
||||||
gstate.points = []
|
gstate.points = []
|
||||||
|
|
||||||
def conn_error(msg):
|
def conn_error(msg, *args):
|
||||||
# Connection errors that happen in an interval loop are not critical
|
# Connection errors that happen in an interval loop are not critical
|
||||||
# failures, but are interesting enough to print in non-verbose mode.
|
# failures, but are interesting enough to print in non-verbose mode.
|
||||||
if loop_time > 0:
|
if loop_time > 0:
|
||||||
print(msg)
|
print(msg % args)
|
||||||
else:
|
else:
|
||||||
logging.error(msg)
|
logging.error(msg, *args)
|
||||||
|
|
||||||
def flush_points(client):
|
def flush_points(client):
|
||||||
try:
|
try:
|
||||||
|
@@ -167,7 +167,7 @@ def main():
|
||||||
print("Data points written: " + str(len(gstate.points)))
|
print("Data points written: " + str(len(gstate.points)))
|
||||||
gstate.points.clear()
|
gstate.points.clear()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
conn_error("Failed writing to InfluxDB database: " + str(e))
|
conn_error("Failed writing to InfluxDB database: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
@@ -179,7 +179,7 @@ def main():
|
||||||
if verbose:
|
if verbose:
|
||||||
print("Using dish ID: " + gstate.dish_id)
|
print("Using dish ID: " + gstate.dish_id)
|
||||||
except starlink_grpc.GrpcError as e:
|
except starlink_grpc.GrpcError as e:
|
||||||
conn_error("Failure getting dish ID: " + str(e))
|
conn_error("Failure getting dish ID: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
timestamp = datetime.datetime.utcnow()
|
timestamp = datetime.datetime.utcnow()
|
||||||
|
@@ -187,7 +187,7 @@ def main():
|
||||||
try:
|
try:
|
||||||
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
||||||
except starlink_grpc.GrpcError as e:
|
except starlink_grpc.GrpcError as e:
|
||||||
conn_error("Failure getting ping stats: " + str(e))
|
conn_error("Failure getting ping stats: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
all_stats = g_stats.copy()
|
all_stats = g_stats.copy()
|
||||||
|
|
|
@@ -124,13 +124,13 @@ def main():
|
||||||
gstate = GlobalState()
|
gstate = GlobalState()
|
||||||
gstate.dish_id = None
|
gstate.dish_id = None
|
||||||
|
|
||||||
def conn_error(msg):
|
def conn_error(msg, *args):
|
||||||
# Connection errors that happen in an interval loop are not critical
|
# Connection errors that happen in an interval loop are not critical
|
||||||
# failures, but are interesting enough to print in non-verbose mode.
|
# failures, but are interesting enough to print in non-verbose mode.
|
||||||
if loop_time > 0:
|
if loop_time > 0:
|
||||||
print(msg)
|
print(msg % args)
|
||||||
else:
|
else:
|
||||||
logging.error(msg)
|
logging.error(msg, *args)
|
||||||
|
|
||||||
def loop_body():
|
def loop_body():
|
||||||
if gstate.dish_id is None:
|
if gstate.dish_id is None:
|
||||||
|
@@ -139,13 +139,13 @@ def main():
|
||||||
if verbose:
|
if verbose:
|
||||||
print("Using dish ID: " + gstate.dish_id)
|
print("Using dish ID: " + gstate.dish_id)
|
||||||
except starlink_grpc.GrpcError as e:
|
except starlink_grpc.GrpcError as e:
|
||||||
conn_error("Failure getting dish ID: " + str(e))
|
conn_error("Failure getting dish ID: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
try:
|
try:
|
||||||
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
||||||
except starlink_grpc.GrpcError as e:
|
except starlink_grpc.GrpcError as e:
|
||||||
conn_error("Failure getting ping stats: " + str(e))
|
conn_error("Failure getting ping stats: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
topic_prefix = "starlink/dish_ping_stats/" + gstate.dish_id + "/"
|
topic_prefix = "starlink/dish_ping_stats/" + gstate.dish_id + "/"
|
||||||
|
@@ -163,7 +163,7 @@ def main():
|
||||||
if verbose:
|
if verbose:
|
||||||
print("Successfully published to MQTT broker")
|
print("Successfully published to MQTT broker")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
conn_error("Failed publishing to MQTT broker: " + str(e))
|
conn_error("Failed publishing to MQTT broker: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
|
@@ -98,7 +98,7 @@ def main():
|
||||||
try:
|
try:
|
||||||
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
|
||||||
except starlink_grpc.GrpcError as e:
|
except starlink_grpc.GrpcError as e:
|
||||||
logging.error("Failure getting ping stats: " + str(e))
|
logging.error("Failure getting ping stats: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
if verbose:
|
if verbose:
|
||||||
|
|
|
@@ -159,13 +159,13 @@ def main():
|
||||||
tags = ["id"]
|
tags = ["id"]
|
||||||
retention_policy = rp
|
retention_policy = rp
|
||||||
|
|
||||||
def conn_error(msg):
|
def conn_error(msg, *args):
|
||||||
# Connection errors that happen in an interval loop are not critical
|
# Connection errors that happen in an interval loop are not critical
|
||||||
# failures, but are interesting enough to print in non-verbose mode.
|
# failures, but are interesting enough to print in non-verbose mode.
|
||||||
if loop_time > 0:
|
if loop_time > 0:
|
||||||
print(msg)
|
print(msg % args)
|
||||||
else:
|
else:
|
||||||
logging.error(msg)
|
logging.error(msg, *args)
|
||||||
|
|
||||||
def flush_pending(client):
|
def flush_pending(client):
|
||||||
try:
|
try:
|
||||||
|
@@ -174,7 +174,7 @@ def main():
|
||||||
print("Data points written: " + str(gstate.pending))
|
print("Data points written: " + str(gstate.pending))
|
||||||
gstate.pending = 0
|
gstate.pending = 0
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
conn_error("Failed writing to InfluxDB database: " + str(e))
|
conn_error("Failed writing to InfluxDB database: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
|
@@ -107,13 +107,13 @@ def main():
|
||||||
gstate = GlobalState()
|
gstate = GlobalState()
|
||||||
gstate.dish_id = None
|
gstate.dish_id = None
|
||||||
|
|
||||||
def conn_error(msg):
|
def conn_error(msg, *args):
|
||||||
# Connection errors that happen in an interval loop are not critical
|
# Connection errors that happen in an interval loop are not critical
|
||||||
# failures, but are interesting enough to print in non-verbose mode.
|
# failures, but are interesting enough to print in non-verbose mode.
|
||||||
if loop_time > 0:
|
if loop_time > 0:
|
||||||
print(msg)
|
print(msg % args)
|
||||||
else:
|
else:
|
||||||
logging.error(msg)
|
logging.error(msg, *args)
|
||||||
|
|
||||||
def loop_body():
|
def loop_body():
|
||||||
try:
|
try:
|
||||||
|
@@ -166,7 +166,7 @@ def main():
|
||||||
if verbose:
|
if verbose:
|
||||||
print("Successfully published to MQTT broker")
|
print("Successfully published to MQTT broker")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
conn_error("Failed publishing to MQTT broker: " + str(e))
|
conn_error("Failed publishing to MQTT broker: %s", str(e))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
|
@@ -89,7 +89,7 @@ try:
|
||||||
g_stats, pd_stats, rl_stats = starlink_json.history_ping_stats(args[0] if args else "-",
|
g_stats, pd_stats, rl_stats = starlink_json.history_ping_stats(args[0] if args else "-",
|
||||||
samples, verbose)
|
samples, verbose)
|
||||||
except starlink_json.JsonError as e:
|
except starlink_json.JsonError as e:
|
||||||
logging.error("Failure getting ping stats: " + str(e))
|
logging.error("Failure getting ping stats: %s", str(e))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if verbose:
|
if verbose:
|
||||||
|
|
|
@@ -108,6 +108,7 @@ def get_status():
|
||||||
response = stub.Handle(spacex.api.device.device_pb2.Request(get_status={}))
|
response = stub.Handle(spacex.api.device.device_pb2.Request(get_status={}))
|
||||||
return response.dish_get_status
|
return response.dish_get_status
|
||||||
|
|
||||||
|
|
||||||
def get_id():
|
def get_id():
|
||||||
"""Return the ID from the dish status information.
|
"""Return the ID from the dish status information.
|
||||||
|
|
||||||
|
@@ -124,6 +125,7 @@ def get_id():
|
||||||
except grpc.RpcError as e:
|
except grpc.RpcError as e:
|
||||||
raise GrpcError(e)
|
raise GrpcError(e)
|
||||||
|
|
||||||
|
|
||||||
def history_ping_field_names():
|
def history_ping_field_names():
|
||||||
"""Return the field names of the packet loss stats.
|
"""Return the field names of the packet loss stats.
|
||||||
|
|
||||||
|
@@ -133,7 +135,7 @@ def history_ping_field_names():
|
||||||
stat names.
|
stat names.
|
||||||
"""
|
"""
|
||||||
return [
|
return [
|
||||||
"samples"
|
"samples",
|
||||||
], [
|
], [
|
||||||
"total_ping_drop",
|
"total_ping_drop",
|
||||||
"count_full_ping_drop",
|
"count_full_ping_drop",
|
||||||
|
@@ -142,14 +144,15 @@ def history_ping_field_names():
|
||||||
"count_full_obstructed_ping_drop",
|
"count_full_obstructed_ping_drop",
|
||||||
"count_unscheduled",
|
"count_unscheduled",
|
||||||
"total_unscheduled_ping_drop",
|
"total_unscheduled_ping_drop",
|
||||||
"count_full_unscheduled_ping_drop"
|
"count_full_unscheduled_ping_drop",
|
||||||
], [
|
], [
|
||||||
"init_run_fragment",
|
"init_run_fragment",
|
||||||
"final_run_fragment",
|
"final_run_fragment",
|
||||||
"run_seconds",
|
"run_seconds",
|
||||||
"run_minutes"
|
"run_minutes",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def get_history():
|
def get_history():
|
||||||
"""Fetch history data and return it in grpc structure format.
|
"""Fetch history data and return it in grpc structure format.
|
||||||
|
|
||||||
|
@@ -161,6 +164,7 @@ def get_history():
|
||||||
response = stub.Handle(spacex.api.device.device_pb2.Request(get_history={}))
|
response = stub.Handle(spacex.api.device.device_pb2.Request(get_history={}))
|
||||||
return response.dish_get_history
|
return response.dish_get_history
|
||||||
|
|
||||||
|
|
||||||
def history_ping_stats(parse_samples, verbose=False):
|
def history_ping_stats(parse_samples, verbose=False):
|
||||||
"""Fetch, parse, and compute the packet loss stats.
|
"""Fetch, parse, and compute the packet loss stats.
|
||||||
|
|
||||||
|
@@ -239,7 +243,7 @@ def history_ping_stats(parse_samples, verbose=False):
|
||||||
if run_length <= 60:
|
if run_length <= 60:
|
||||||
second_runs[run_length - 1] += run_length
|
second_runs[run_length - 1] += run_length
|
||||||
else:
|
else:
|
||||||
minute_runs[min((run_length - 1)//60 - 1, 59)] += run_length
|
minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
|
||||||
run_length = 0
|
run_length = 0
|
||||||
elif init_run_length is None:
|
elif init_run_length is None:
|
||||||
init_run_length = 0
|
init_run_length = 0
|
||||||
|
@@ -267,7 +271,7 @@ def history_ping_stats(parse_samples, verbose=False):
|
||||||
run_length = 0
|
run_length = 0
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"samples": parse_samples
|
"samples": parse_samples,
|
||||||
}, {
|
}, {
|
||||||
"total_ping_drop": tot,
|
"total_ping_drop": tot,
|
||||||
"count_full_ping_drop": count_full_drop,
|
"count_full_ping_drop": count_full_drop,
|
||||||
|
@@ -276,10 +280,10 @@ def history_ping_stats(parse_samples, verbose=False):
|
||||||
"count_full_obstructed_ping_drop": count_full_obstruct,
|
"count_full_obstructed_ping_drop": count_full_obstruct,
|
||||||
"count_unscheduled": count_unsched,
|
"count_unscheduled": count_unsched,
|
||||||
"total_unscheduled_ping_drop": total_unsched_drop,
|
"total_unscheduled_ping_drop": total_unsched_drop,
|
||||||
"count_full_unscheduled_ping_drop": count_full_unsched
|
"count_full_unscheduled_ping_drop": count_full_unsched,
|
||||||
}, {
|
}, {
|
||||||
"init_run_fragment": init_run_length,
|
"init_run_fragment": init_run_length,
|
||||||
"final_run_fragment": run_length,
|
"final_run_fragment": run_length,
|
||||||
"run_seconds": second_runs,
|
"run_seconds": second_runs,
|
||||||
"run_minutes": minute_runs
|
"run_minutes": minute_runs,
|
||||||
}
|
}
|
||||||
|
|
|
@@ -28,7 +28,7 @@ def history_ping_field_names():
|
||||||
stat names.
|
stat names.
|
||||||
"""
|
"""
|
||||||
return [
|
return [
|
||||||
"samples"
|
"samples",
|
||||||
], [
|
], [
|
||||||
"total_ping_drop",
|
"total_ping_drop",
|
||||||
"count_full_ping_drop",
|
"count_full_ping_drop",
|
||||||
|
@@ -37,14 +37,15 @@ def history_ping_field_names():
|
||||||
"count_full_obstructed_ping_drop",
|
"count_full_obstructed_ping_drop",
|
||||||
"count_unscheduled",
|
"count_unscheduled",
|
||||||
"total_unscheduled_ping_drop",
|
"total_unscheduled_ping_drop",
|
||||||
"count_full_unscheduled_ping_drop"
|
"count_full_unscheduled_ping_drop",
|
||||||
], [
|
], [
|
||||||
"init_run_fragment",
|
"init_run_fragment",
|
||||||
"final_run_fragment",
|
"final_run_fragment",
|
||||||
"run_seconds",
|
"run_seconds",
|
||||||
"run_minutes"
|
"run_minutes",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def get_history(filename):
|
def get_history(filename):
|
||||||
"""Read JSON data and return the raw history in dict format.
|
"""Read JSON data and return the raw history in dict format.
|
||||||
|
|
||||||
|
@@ -63,6 +64,7 @@ def get_history(filename):
|
||||||
json_data = json.load(json_file)
|
json_data = json.load(json_file)
|
||||||
return json_data["dishGetHistory"]
|
return json_data["dishGetHistory"]
|
||||||
|
|
||||||
|
|
||||||
def history_ping_stats(filename, parse_samples, verbose=False):
|
def history_ping_stats(filename, parse_samples, verbose=False):
|
||||||
"""Fetch, parse, and compute the packet loss stats.
|
"""Fetch, parse, and compute the packet loss stats.
|
||||||
|
|
||||||
|
@@ -144,7 +146,7 @@ def history_ping_stats(filename, parse_samples, verbose=False):
|
||||||
if run_length <= 60:
|
if run_length <= 60:
|
||||||
second_runs[run_length - 1] += run_length
|
second_runs[run_length - 1] += run_length
|
||||||
else:
|
else:
|
||||||
minute_runs[min((run_length - 1)//60 - 1, 59)] += run_length
|
minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
|
||||||
run_length = 0
|
run_length = 0
|
||||||
elif init_run_length is None:
|
elif init_run_length is None:
|
||||||
init_run_length = 0
|
init_run_length = 0
|
||||||
|
@@ -172,7 +174,7 @@ def history_ping_stats(filename, parse_samples, verbose=False):
|
||||||
run_length = 0
|
run_length = 0
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"samples": parse_samples
|
"samples": parse_samples,
|
||||||
}, {
|
}, {
|
||||||
"total_ping_drop": tot,
|
"total_ping_drop": tot,
|
||||||
"count_full_ping_drop": count_full_drop,
|
"count_full_ping_drop": count_full_drop,
|
||||||
|
@@ -181,10 +183,10 @@ def history_ping_stats(filename, parse_samples, verbose=False):
|
||||||
"count_full_obstructed_ping_drop": count_full_obstruct,
|
"count_full_obstructed_ping_drop": count_full_obstruct,
|
||||||
"count_unscheduled": count_unsched,
|
"count_unscheduled": count_unsched,
|
||||||
"total_unscheduled_ping_drop": total_unsched_drop,
|
"total_unscheduled_ping_drop": total_unsched_drop,
|
||||||
"count_full_unscheduled_ping_drop": count_full_unsched
|
"count_full_unscheduled_ping_drop": count_full_unsched,
|
||||||
}, {
|
}, {
|
||||||
"init_run_fragment": init_run_length,
|
"init_run_fragment": init_run_length,
|
||||||
"final_run_fragment": run_length,
|
"final_run_fragment": run_length,
|
||||||
"run_seconds": second_runs,
|
"run_seconds": second_runs,
|
||||||
"run_minutes": minute_runs
|
"run_minutes": minute_runs,
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue