From 3fafcea8829bb0fb584f52771f1bc6cdbad5a118 Mon Sep 17 00:00:00 2001
From: sparky8512 <76499194+sparky8512@users.noreply.github.com>
Date: Fri, 15 Jan 2021 19:27:10 -0800
Subject: [PATCH] Fix remaining pylint and yapf nits

---
 dishHistoryInflux.py | 12 ++++++------
 dishHistoryMqtt.py   | 12 ++++++------
 dishHistoryStats.py  |  2 +-
 dishStatusInflux.py  |  8 ++++----
 dishStatusMqtt.py    |  8 ++++----
 parseJsonHistory.py  |  2 +-
 starlink_grpc.py     | 18 +++++++++++-------
 starlink_json.py     | 16 +++++++++-------
 8 files changed, 42 insertions(+), 36 deletions(-)

diff --git a/dishHistoryInflux.py b/dishHistoryInflux.py
index b1a9b3e..07e43f7 100644
--- a/dishHistoryInflux.py
+++ b/dishHistoryInflux.py
@@ -152,13 +152,13 @@ def main():
     gstate.dish_id = None
     gstate.points = []
 
-    def conn_error(msg):
+    def conn_error(msg, *args):
         # Connection errors that happen in an interval loop are not critical
         # failures, but are interesting enough to print in non-verbose mode.
         if loop_time > 0:
-            print(msg)
+            print(msg % args)
         else:
-            logging.error(msg)
+            logging.error(msg, *args)
 
     def flush_points(client):
         try:
@@ -167,7 +167,7 @@ def main():
                 print("Data points written: " + str(len(gstate.points)))
             gstate.points.clear()
         except Exception as e:
-            conn_error("Failed writing to InfluxDB database: " + str(e))
+            conn_error("Failed writing to InfluxDB database: %s", str(e))
             return 1
         return 0
 
@@ -179,7 +179,7 @@ def main():
                 if verbose:
                     print("Using dish ID: " + gstate.dish_id)
             except starlink_grpc.GrpcError as e:
-                conn_error("Failure getting dish ID: " + str(e))
+                conn_error("Failure getting dish ID: %s", str(e))
                 return 1
 
         timestamp = datetime.datetime.utcnow()
@@ -187,7 +187,7 @@ def main():
         try:
             g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
         except starlink_grpc.GrpcError as e:
-            conn_error("Failure getting ping stats: " + str(e))
+            conn_error("Failure getting ping stats: %s", str(e))
             return 1
 
         all_stats = g_stats.copy()
diff --git a/dishHistoryMqtt.py b/dishHistoryMqtt.py
index 1e7b855..a4349d4 100644
--- a/dishHistoryMqtt.py
+++ b/dishHistoryMqtt.py
@@ -124,13 +124,13 @@ def main():
     gstate = GlobalState()
     gstate.dish_id = None
 
-    def conn_error(msg):
+    def conn_error(msg, *args):
         # Connection errors that happen in an interval loop are not critical
         # failures, but are interesting enough to print in non-verbose mode.
         if loop_time > 0:
-            print(msg)
+            print(msg % args)
         else:
-            logging.error(msg)
+            logging.error(msg, *args)
 
     def loop_body():
         if gstate.dish_id is None:
@@ -139,13 +139,13 @@ def main():
                 if verbose:
                     print("Using dish ID: " + gstate.dish_id)
             except starlink_grpc.GrpcError as e:
-                conn_error("Failure getting dish ID: " + str(e))
+                conn_error("Failure getting dish ID: %s", str(e))
                 return 1
 
         try:
             g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
         except starlink_grpc.GrpcError as e:
-            conn_error("Failure getting ping stats: " + str(e))
+            conn_error("Failure getting ping stats: %s", str(e))
             return 1
 
         topic_prefix = "starlink/dish_ping_stats/" + gstate.dish_id + "/"
@@ -163,7 +163,7 @@ def main():
             if verbose:
                 print("Successfully published to MQTT broker")
         except Exception as e:
-            conn_error("Failed publishing to MQTT broker: " + str(e))
+            conn_error("Failed publishing to MQTT broker: %s", str(e))
             return 1
         return 0
 
diff --git a/dishHistoryStats.py b/dishHistoryStats.py
index f08ac84..45a4ee1 100644
--- a/dishHistoryStats.py
+++ b/dishHistoryStats.py
@@ -98,7 +98,7 @@ def main():
     try:
         g_stats, pd_stats, rl_stats = starlink_grpc.history_ping_stats(samples, verbose)
     except starlink_grpc.GrpcError as e:
-        logging.error("Failure getting ping stats: " + str(e))
+        logging.error("Failure getting ping stats: %s", str(e))
         return 1
 
     if verbose:
diff --git a/dishStatusInflux.py b/dishStatusInflux.py
index 0c00b41..6a708f3 100644
--- a/dishStatusInflux.py
+++ b/dishStatusInflux.py
@@ -159,13 +159,13 @@ def main():
             tags = ["id"]
             retention_policy = rp
 
-    def conn_error(msg):
+    def conn_error(msg, *args):
         # Connection errors that happen in an interval loop are not critical
         # failures, but are interesting enough to print in non-verbose mode.
         if loop_time > 0:
-            print(msg)
+            print(msg % args)
         else:
-            logging.error(msg)
+            logging.error(msg, *args)
 
     def flush_pending(client):
         try:
@@ -174,7 +174,7 @@ def main():
                 print("Data points written: " + str(gstate.pending))
             gstate.pending = 0
         except Exception as e:
-            conn_error("Failed writing to InfluxDB database: " + str(e))
+            conn_error("Failed writing to InfluxDB database: %s", str(e))
             return 1
         return 0
 
diff --git a/dishStatusMqtt.py b/dishStatusMqtt.py
index ce84ab6..06a1324 100644
--- a/dishStatusMqtt.py
+++ b/dishStatusMqtt.py
@@ -107,13 +107,13 @@ def main():
     gstate = GlobalState()
     gstate.dish_id = None
 
-    def conn_error(msg):
+    def conn_error(msg, *args):
         # Connection errors that happen in an interval loop are not critical
         # failures, but are interesting enough to print in non-verbose mode.
         if loop_time > 0:
-            print(msg)
+            print(msg % args)
         else:
-            logging.error(msg)
+            logging.error(msg, *args)
 
     def loop_body():
         try:
@@ -166,7 +166,7 @@ def main():
             if verbose:
                 print("Successfully published to MQTT broker")
         except Exception as e:
-            conn_error("Failed publishing to MQTT broker: " + str(e))
+            conn_error("Failed publishing to MQTT broker: %s", str(e))
             return 1
         return 0
 
diff --git a/parseJsonHistory.py b/parseJsonHistory.py
index 50fe1ff..e12d676 100644
--- a/parseJsonHistory.py
+++ b/parseJsonHistory.py
@@ -89,7 +89,7 @@ try:
     g_stats, pd_stats, rl_stats = starlink_json.history_ping_stats(args[0] if args else "-",
                                                                    samples, verbose)
 except starlink_json.JsonError as e:
-    logging.error("Failure getting ping stats: " + str(e))
+    logging.error("Failure getting ping stats: %s", str(e))
     sys.exit(1)
 
 if verbose:
diff --git a/starlink_grpc.py b/starlink_grpc.py
index ec65b14..40e3572 100644
--- a/starlink_grpc.py
+++ b/starlink_grpc.py
@@ -108,6 +108,7 @@ def get_status():
     response = stub.Handle(spacex.api.device.device_pb2.Request(get_status={}))
     return response.dish_get_status
 
+
 def get_id():
     """Return the ID from the dish status information.
 
@@ -124,6 +125,7 @@ def get_id():
     except grpc.RpcError as e:
         raise GrpcError(e)
 
+
 def history_ping_field_names():
     """Return the field names of the packet loss stats.
 
@@ -133,7 +135,7 @@ def history_ping_field_names():
     stat names.
     """
     return [
-        "samples"
+        "samples",
     ], [
         "total_ping_drop",
         "count_full_ping_drop",
@@ -142,14 +144,15 @@ def history_ping_field_names():
         "count_full_obstructed_ping_drop",
         "count_unscheduled",
         "total_unscheduled_ping_drop",
-        "count_full_unscheduled_ping_drop"
+        "count_full_unscheduled_ping_drop",
     ], [
         "init_run_fragment",
         "final_run_fragment",
         "run_seconds",
-        "run_minutes"
+        "run_minutes",
     ]
 
+
 def get_history():
     """Fetch history data and return it in grpc structure format.
 
@@ -161,6 +164,7 @@ def get_history():
     response = stub.Handle(spacex.api.device.device_pb2.Request(get_history={}))
     return response.dish_get_history
 
+
 def history_ping_stats(parse_samples, verbose=False):
     """Fetch, parse, and compute the packet loss stats.
 
@@ -239,7 +243,7 @@ def history_ping_stats(parse_samples, verbose=False):
                 if run_length <= 60:
                     second_runs[run_length - 1] += run_length
                 else:
-                    minute_runs[min((run_length - 1)//60 - 1, 59)] += run_length
+                    minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
             run_length = 0
         elif init_run_length is None:
             init_run_length = 0
@@ -267,7 +271,7 @@ def history_ping_stats(parse_samples, verbose=False):
         run_length = 0
 
     return {
-        "samples": parse_samples
+        "samples": parse_samples,
     }, {
         "total_ping_drop": tot,
         "count_full_ping_drop": count_full_drop,
@@ -276,10 +280,10 @@ def history_ping_stats(parse_samples, verbose=False):
         "count_full_obstructed_ping_drop": count_full_obstruct,
         "count_unscheduled": count_unsched,
         "total_unscheduled_ping_drop": total_unsched_drop,
-        "count_full_unscheduled_ping_drop": count_full_unsched
+        "count_full_unscheduled_ping_drop": count_full_unsched,
     }, {
         "init_run_fragment": init_run_length,
         "final_run_fragment": run_length,
         "run_seconds": second_runs,
-        "run_minutes": minute_runs
+        "run_minutes": minute_runs,
     }
diff --git a/starlink_json.py b/starlink_json.py
index 7396c5a..7365430 100644
--- a/starlink_json.py
+++ b/starlink_json.py
@@ -28,7 +28,7 @@ def history_ping_field_names():
     stat names.
     """
     return [
-        "samples"
+        "samples",
     ], [
         "total_ping_drop",
         "count_full_ping_drop",
@@ -37,14 +37,15 @@ def history_ping_field_names():
         "count_full_obstructed_ping_drop",
         "count_unscheduled",
         "total_unscheduled_ping_drop",
-        "count_full_unscheduled_ping_drop"
+        "count_full_unscheduled_ping_drop",
     ], [
         "init_run_fragment",
         "final_run_fragment",
         "run_seconds",
-        "run_minutes"
+        "run_minutes",
     ]
 
+
 def get_history(filename):
     """Read JSON data and return the raw history in dict format.
 
@@ -63,6 +64,7 @@ def get_history(filename):
         json_data = json.load(json_file)
     return json_data["dishGetHistory"]
 
+
 def history_ping_stats(filename, parse_samples, verbose=False):
     """Fetch, parse, and compute the packet loss stats.
 
@@ -144,7 +146,7 @@ def history_ping_stats(filename, parse_samples, verbose=False):
                 if run_length <= 60:
                     second_runs[run_length - 1] += run_length
                 else:
-                    minute_runs[min((run_length - 1)//60 - 1, 59)] += run_length
+                    minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
             run_length = 0
         elif init_run_length is None:
             init_run_length = 0
@@ -172,7 +174,7 @@ def history_ping_stats(filename, parse_samples, verbose=False):
         run_length = 0
 
     return {
-        "samples": parse_samples
+        "samples": parse_samples,
    }, {
         "total_ping_drop": tot,
         "count_full_ping_drop": count_full_drop,
@@ -181,10 +183,10 @@ def history_ping_stats(filename, parse_samples, verbose=False):
         "count_full_obstructed_ping_drop": count_full_obstruct,
         "count_unscheduled": count_unsched,
         "total_unscheduled_ping_drop": total_unsched_drop,
-        "count_full_unscheduled_ping_drop": count_full_unsched
+        "count_full_unscheduled_ping_drop": count_full_unsched,
     }, {
         "init_run_fragment": init_run_length,
         "final_run_fragment": run_length,
         "run_seconds": second_runs,
-        "run_minutes": minute_runs
+        "run_minutes": minute_runs,
     }