Fix line endings to be consistent with the other scripts

sparky8512 2020-12-30 13:01:41 -08:00
parent f206a3ad91
commit 4ff6cfb5fa


@@ -1,190 +1,190 @@
#!/usr/bin/python
######################################################################
#
# Example parser for the JSON format history stats output of grpcurl
# for the gRPC service provided on a Starlink user terminal.
#
# Expects input as from the following command:
# grpcurl -plaintext -d {\"get_history\":{}} 192.168.100.1:9200 SpaceX.API.Device.Device/Handle
#
# This script examines the most recent samples from the history data
# and computes several different metrics related to packet loss. By
# default, it will print the results in CSV format.
#
######################################################################
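
# Example usage (the script filename here is illustrative, not taken from
# the commit): the grpcurl output can be piped straight into this parser,
# since it reads stdin by default, e.g.
# grpcurl -plaintext -d {\"get_history\":{}} 192.168.100.1:9200 SpaceX.API.Device.Device/Handle | python parseJsonHistory.py -r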

import json
import datetime
import sys
import getopt

from itertools import chain

fArgError = False

try:
    opts, args = getopt.getopt(sys.argv[1:], "ahrs:vH")
except getopt.GetoptError as err:
    print(str(err))
    fArgError = True

# Default to 1 hour worth of data samples.
parseSamples = 3600
fUsage = False
fVerbose = False
fParseAll = False
fHeader = False
fRunLengths = False

if not fArgError:
    if len(args) > 1:
        fArgError = True
    else:
        for opt, arg in opts:
            if opt == "-a":
                fParseAll = True
            elif opt == "-h":
                fUsage = True
            elif opt == "-r":
                fRunLengths = True
            elif opt == "-s":
                parseSamples = int(arg)
            elif opt == "-v":
                fVerbose = True
            elif opt == "-H":
                fHeader = True

if fUsage or fArgError:
    print("Usage: "+sys.argv[0]+" [options...] [<file>]")
    print(" where <file> is the file to parse, default: stdin")
    print("Options:")
    print(" -a: Parse all valid samples")
    print(" -h: Be helpful")
    print(" -r: Include ping drop run length stats")
    print(" -s <num>: Parse <num> data samples, default: "+str(parseSamples))
    print(" -v: Be verbose")
    print(" -H: print CSV header instead of parsing file")
    sys.exit(1 if fArgError else 0)

if fHeader:
    header = "datetimestamp_utc,samples,total_ping_drop,count_full_ping_drop,count_obstructed,total_obstructed_ping_drop,count_full_obstructed_ping_drop,count_unscheduled,total_unscheduled_ping_drop,count_full_unscheduled_ping_drop"
    if fRunLengths:
        header += ",init_run_fragment,final_run_fragment,"
        header += ",".join("run_seconds_" + str(x) for x in range(1, 61)) + ","
        header += ",".join("run_minutes_" + str(x) for x in range(1, 60))
        header += ",run_minutes_60_or_greater"
    print(header)
    sys.exit(0)
# Allow "-" to be specified as file for stdin. # Allow "-" to be specified as file for stdin.
if len(args) == 0 or args[0] == "-": if len(args) == 0 or args[0] == "-":
jsonData = json.load(sys.stdin) jsonData = json.load(sys.stdin)
else: else:
jsonFile = open(args[0]) jsonFile = open(args[0])
jsonData = json.load(jsonFile) jsonData = json.load(jsonFile)
jsonFile.close() jsonFile.close()
timestamp = datetime.datetime.utcnow() timestamp = datetime.datetime.utcnow()
historyData = jsonData['dishGetHistory'] historyData = jsonData['dishGetHistory']
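
# Fields read from the history data below: 'current' (count of samples ever
# written to the ring buffer) plus the per-sample arrays 'popPingDropRate',
# 'scheduled', and 'obstructed'.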

# 'current' is the count of data samples written to the ring buffer,
# irrespective of buffer wrap.
current = int(historyData['current'])
nSamples = len(historyData['popPingDropRate'])

if fVerbose:
    print("current: " + str(current))
    print("All samples: " + str(nSamples))

nSamples = min(nSamples, current)

if fVerbose:
    print("Valid samples: " + str(nSamples))

# This is ring buffer offset, so both index to oldest data sample and
# index to next data sample after the newest one.
offset = current % nSamples
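# For example (illustrative numbers): current == 50000 with a 43200-sample
# buffer gives offset == 6800, so index 6800 holds the oldest sample and
# index 6799 the newest.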

tot = 0             # total ping drop across parsed samples
totOne = 0          # samples with 100% ping drop (count)
totUnsched = 0      # count of unscheduled samples
totUnschedD = 0     # ping drop during unscheduled samples
totUnschedOne = 0   # unscheduled samples with 100% ping drop
totObstruct = 0     # count of obstructed samples
totObstructD = 0    # ping drop during obstructed samples
totObstructOne = 0  # obstructed samples with 100% ping drop

secondRuns = [0] * 60  # seconds of drop in runs of 1-60 seconds, by run length
minuteRuns = [0] * 60  # seconds of drop in runs over 60 seconds, by minute
runLength = 0
initRun = None

if fParseAll or nSamples < parseSamples:
    parseSamples = nSamples

# Parse the most recent parseSamples-sized set of samples. This will
# iterate samples in order from oldest to newest, although that's not
# actually required for the current set of stats being computed below.
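# For example (illustrative numbers): offset == 100, parseSamples == 3600 and
# nSamples == 43200 take the wrapped branch below, giving
# chain(range(39700, 43200), range(0, 100)): the 3500 samples at the end of
# the buffer followed by the 100 at the start.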
if parseSamples <= offset:
    sampleRange = range(offset - parseSamples, offset)
else:
    sampleRange = chain(range(nSamples + offset - parseSamples, nSamples), range(0, offset))

for i in sampleRange:
    d = historyData["popPingDropRate"][i]
    tot += d
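    # Track consecutive samples with 100% ping drop ("drop runs"). runLength
    # counts the run in progress; when a run ends, its seconds are added to
    # secondRuns (runs of 60 samples or less, by exact length) or minuteRuns
    # (longer runs, by minute). A run already in progress at the start of the
    # parsed range is recorded separately as the initial fragment, initRun.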
    if d >= 1:
        totOne += d
        runLength += 1
    elif runLength > 0:
        if initRun is None:
            initRun = runLength
        else:
            if runLength <= 60:
                secondRuns[runLength-1] += runLength
            else:
                minuteRuns[min((runLength-1)//60-1, 59)] += runLength
        runLength = 0
    elif initRun is None:
        initRun = 0
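    # Separately attribute each sample's drop to unscheduled time and/or
    # obstructions, based on the per-sample flags in the history data.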
    if not historyData["scheduled"][i]:
        totUnsched += 1
        totUnschedD += d
        if d >= 1:
            totUnschedOne += d
    if historyData["obstructed"][i]:
        totObstruct += 1
        totObstructD += d
        if d >= 1:
            totObstructOne += d

# If the entire sample set is one big drop run, it will be both initial
# fragment (continued from prior sample range) and final one (continued
# to next sample range), but to avoid double-reporting, just call it
# the initial run.
if initRun is None:
    initRun = runLength
    runLength = 0

if fVerbose:
    print("Parsed samples: " + str(parseSamples))
    print("Total ping drop: " + str(tot))
    print("Count of drop == 1: " + str(totOne))
    print("Obstructed: " + str(totObstruct))
    print("Obstructed ping drop: " + str(totObstructD))
    print("Obstructed drop == 1: " + str(totObstructOne))
    print("Unscheduled: " + str(totUnsched))
    print("Unscheduled ping drop: " + str(totUnschedD))
    print("Unscheduled drop == 1: " + str(totUnschedOne))
    if fRunLengths:
        print("Initial drop run fragment: " + str(initRun))
        print("Final drop run fragment: " + str(runLength))
        print("Per-second drop runs: " + ", ".join(str(x) for x in secondRuns))
        print("Per-minute drop runs: " + ", ".join(str(x) for x in minuteRuns))
else:
    # NOTE: When changing data output format, also change the -H header printing above.
    csvData = timestamp.replace(microsecond=0).isoformat() + "," + ",".join(str(x) for x in [parseSamples, tot, totOne, totObstruct, totObstructD, totObstructOne, totUnsched, totUnschedD, totUnschedOne])
    if fRunLengths:
        csvData += "," + ",".join(str(x) for x in chain([initRun, runLength], secondRuns, minuteRuns))
    print(csvData)