Compare commits

10 commits: 8cbab00517...546aeb83f6

Commits:
546aeb83f6
5cbc5b73dd
955e2e0b5d
237cc349cf
a8717d9549
2eebbfc366
ab2bce59ca
8b1d81b2bb
0298ce2106
ad36e08451
30 changed files with 2587 additions and 933 deletions

53 .github/workflows/docker_ghpr.yml (vendored, deleted)
@@ -1,53 +0,0 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Create and publish a Docker image to GitHub Packages Repository

on: workflow_dispatch

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        with:
          platforms: 'arm64'

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to the Container registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v3
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
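
The deleted workflow ran only on workflow_dispatch, so it had to be started by hand. A minimal sketch of triggering such a run with the GitHub CLI, assuming gh is installed and authenticated against the repository:

```shell
# Start the manual-only workflow by its file name (taken from the diff above)
gh workflow run docker_ghpr.yml

# Follow the run it kicks off
gh run watch
```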

3 .gitignore (vendored, new file)
@@ -0,0 +1,3 @@
/result*
/.venv
__pycache__

12 .vscode/settings.json (vendored, new file)
@@ -0,0 +1,12 @@
{
  "editor.codeActionsOnSave": {
    "source.organizeImports": true
  },
  "editor.formatOnSave": true,
  "editor.formatOnSaveMode": "file",
  "python.analysis.typeCheckingMode": "off",
  "python.formatting.provider": "none",
  "[python]": {
    "editor.defaultFormatter": "ms-python.black-formatter"
  }
}

25 Dockerfile (deleted)
@@ -1,25 +0,0 @@
FROM python:3.9
LABEL maintainer="neurocis <neurocis@neurocis.me>"

RUN true && \
\
# Install python prerequisites
pip3 install \
    grpcio==1.50.0 six==1.16.0 \
    influxdb==5.3.1 certifi==2022.9.24 charset-normalizer==2.1.1 idna==3.4 \
    msgpack==1.0.4 python-dateutil==2.8.2 pytz==2022.6 requests==2.28.1 \
    urllib3==1.26.12 \
    influxdb-client==1.34.0 reactivex==4.0.4 \
    paho-mqtt==1.6.1 \
    pypng==0.20220715.0 \
    typing_extensions==4.4.0 \
    yagrc==1.1.1 grpcio-reflection==1.50.0 protobuf==4.21.9

ADD . /app
WORKDIR /app

ENTRYPOINT ["/bin/sh", "/app/entrypoint.sh"]
CMD ["dish_grpc_influx.py status alert_detail"]

# docker run -d --name='starlink-grpc-tools' -e INFLUXDB_HOST=192.168.1.34 -e INFLUXDB_PORT=8086 -e INFLUXDB_DB=starlink
# --net='br0' --ip='192.168.1.39' ghcr.io/sparky8512/starlink-grpc-tools dish_grpc_influx.py status alert_detail
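
The trailing comment in the deleted Dockerfile shows how the published image was run; building it locally was the usual single step. A sketch, with an illustrative tag name:

```shell
# Build the image from the repository root (the tag name is illustrative)
docker build -t starlink-grpc-tools .
```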

@@ -1,54 +0,0 @@
#!/usr/bin/python3
"""Manipulate operating state of a Starlink user terminal."""

import argparse
import logging
import sys

import grpc
from yagrc import reflector as yagrc_reflector


def parse_args():
    parser = argparse.ArgumentParser(description="Starlink user terminal state control")
    parser.add_argument("command", choices=["reboot", "stow", "unstow"])
    parser.add_argument("-e",
                        "--target",
                        default="192.168.100.1:9200",
                        help="host:port of dish to query, default is the standard IP address "
                        "and port (192.168.100.1:9200)")
    return parser.parse_args()


def main():
    opts = parse_args()

    logging.basicConfig(format="%(levelname)s: %(message)s")

    reflector = yagrc_reflector.GrpcReflectionClient()
    try:
        with grpc.insecure_channel(opts.target) as channel:
            reflector.load_protocols(channel, symbols=["SpaceX.API.Device.Device"])
            request_class = reflector.message_class("SpaceX.API.Device.Request")
            if opts.command == "reboot":
                request = request_class(reboot={})
            elif opts.command == "stow":
                request = request_class(dish_stow={})
            else:  # unstow
                request = request_class(dish_stow={"unstow": True})
            stub = reflector.service_stub_class("SpaceX.API.Device.Device")(channel)
            stub.Handle(request, timeout=10)
            # response is just empty message, so ignore it
    except grpc.RpcError as e:
        if isinstance(e, grpc.Call):
            msg = e.details()
        else:
            msg = "Unknown communication or service error"
        logging.error(msg)
        sys.exit(1)

    sys.exit(0)


if __name__ == "__main__":
    main()
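
The deleted script above is a small argparse CLI (its path was not captured in this extraction). A sketch of how such a script is invoked, using an illustrative file name:

```shell
# Reboot the dish at the default 192.168.100.1:9200 (script name is illustrative)
python3 dish_control.py reboot

# Stow, pointing at an explicit host:port
python3 dish_control.py stow --target 192.168.100.1:9200
```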

@@ -1,5 +0,0 @@
#!/bin/sh

printenv >> /etc/environment
ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
exec /usr/local/bin/python3 $@

169 flake.lock (new file)
@@ -0,0 +1,169 @@
{
  "nodes": {
    "bash": {
      "locked": {
        "lastModified": 1678247195,
        "narHash": "sha256-m/wSwlSket+hob3JED4XUvoWJLtW7yhtOiZrlRDMShs=",
        "ref": "refs/heads/main",
        "rev": "e7a00dcc0e75bc3ef6856bdd94d7d809245f5636",
        "revCount": 1,
        "type": "git",
        "url": "https://git.ocjtech.us/jeff/nixos-bash-prompt-builder.git"
      },
      "original": {
        "type": "git",
        "url": "https://git.ocjtech.us/jeff/nixos-bash-prompt-builder.git"
      }
    },
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1692799911,
        "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "flake-utils_2": {
      "inputs": {
        "systems": "systems_2"
      },
      "locked": {
        "lastModified": 1689068808,
        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "make-shell": {
      "locked": {
        "lastModified": 1634940815,
        "narHash": "sha256-P69OmveboXzS+es1vQGS4bt+ckwbeIExqxfGLjGuJqA=",
        "owner": "ursi",
        "repo": "nix-make-shell",
        "rev": "8add91681170924e4d0591b22f294aee3f5516f9",
        "type": "github"
      },
      "original": {
        "owner": "ursi",
        "repo": "nix-make-shell",
        "type": "github"
      }
    },
    "nix-github-actions": {
      "inputs": {
        "nixpkgs": [
          "poetry2nix",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1688870561,
        "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1693341273,
        "narHash": "sha256-wrsPjsIx2767909MPGhSIOmkpGELM9eufqLQOPxmZQg=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "2ab91c8d65c00fd22a441c69bbf1bc9b420d5ea1",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "ref": "nixos-23.05",
        "type": "indirect"
      }
    },
    "poetry2nix": {
      "inputs": {
        "flake-utils": "flake-utils_2",
        "nix-github-actions": "nix-github-actions",
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1693051011,
        "narHash": "sha256-HNbuVCS/Fnl1YZOjBk9/MlIem+wM8fvIzTH0CVQrLSQ=",
        "owner": "nix-community",
        "repo": "poetry2nix",
        "rev": "5b3a5151cf212021ff8d424f215fb030e4ff2837",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "poetry2nix",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "bash": "bash",
        "flake-utils": "flake-utils",
        "make-shell": "make-shell",
        "nixpkgs": "nixpkgs",
        "poetry2nix": "poetry2nix"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "systems_2": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

87 flake.nix (new file)
@@ -0,0 +1,87 @@
{
  description = "Starling gRPC Tools";

  inputs = {
    nixpkgs = {
      url = "nixpkgs/nixos-23.05";
    };
    poetry2nix = {
      url = "github:nix-community/poetry2nix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    flake-utils = {
      url = "github:numtide/flake-utils";
    };
    bash = {
      url = "git+https://git.ocjtech.us/jeff/nixos-bash-prompt-builder.git";
    };
    make-shell = {
      url = "github:ursi/nix-make-shell";
    };
  };

  outputs = { self, nixpkgs, poetry2nix, flake-utils, bash, make-shell, ... }@inputs:
    flake-utils.lib.eachDefaultSystem
      (system:
        let
          inherit (poetry2nix.legacyPackages.${system}) mkPoetryApplication overrides;
          pkgs = import nixpkgs {
            inherit system;
          };
          python = pkgs.python311.withPackages (ps: with ps; [
            poetry-core
          ]);
        in
        {
          devShells.default =
            let
              make-shell = import inputs.make-shell {
                inherit system;
                pkgs = pkgs;
              };
              project = "starlink";
              prompt = (
                bash.build_prompt
                  bash.ansi_normal_blue
                  "${project} - ${bash.username}@${bash.hostname_short}: ${bash.current_working_directory}"
                  "${project}:${bash.current_working_directory}"
              );
            in
            make-shell {
              packages = [
                python
                pkgs.poetry
              ];
              env = {
                POETRY_VIRTUALENVS_IN_PROJECT = "true";
                PS1 = prompt;
              };
              setup = ''
                export PATH=''$(pwd)/.venv/bin:$PATH
              '';
            };
          packages = {
            starlink-grpc-tools = mkPoetryApplication {
              python = pkgs.python311;
              projectDir = ./.;
              groups = [ ];
              overrides = overrides.withDefaults (
                self: super: {
                  yagrc = super.yagrc.overridePythonAttrs (
                    old: {
                      buildInputs = old.buildInputs ++ [ self.setuptools ];
                    }
                  );
                }
              );
              meta = with pkgs.lib; {
                homepage = "https://github.com/sparky8512/starlink-grpc-tools";
                description = "";
                longDescription = '''';
                license = licenses.unlicense;
              };
            };
          };
          default = self.packages.${system}.starlink-grpc-tools;
        }
      );
}
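
Given the outputs the new flake declares (devShells.default and packages.starlink-grpc-tools), the standard flake entry points apply. A minimal sketch, assuming a Nix installation with flakes enabled:

```shell
# Enter the Poetry-based development shell (devShells.default)
nix develop

# Build the packaged application, or the flake's default package
nix build .#starlink-grpc-tools
nix build
```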

@@ -1,26 +0,0 @@
# starlink-grpc-tools Core Module

This project packages up the `starlink_grpc` module from the [starlink-grpc-tools](https://github.com/sparky8512/starlink-grpc-tools) project and exports it as an installable package for use by other projects. It is not needed to install this project in order to use the scripts in starlink-grpc-tools, as those have their own copy of `starlink_grpc.py`.

`starlink_grpc.py` is the only part of the scripts in starlink-grpc-tools that is designed to have a stable enough interface to be directly callable from other projects without having to go through a clunky command line interface. It provides the low(er) level core functionality available via the [gRPC](https://grpc.io/) service implemented on the Starlink user terminal.

# Installation

The most recently published version of this project can be installed by itself using pip:
```shell script
pip install starlink-grpc-core
```
However, it is really meant to be installed as a dependency by other projects.

# Usage

The installation process places the `starlink_grpc.py` module in the top-level of your Python lib directory or virtual environment, so it can be used simply by doing:
```python
import starlink_grpc
```
and then calling whatever functions you need. For details, see the doc strings in `starlink_grpc.py`.

# Examples

For example usage, see calling scripts in the [starlink-grpc-tools](https://github.com/sparky8512/starlink-grpc-tools) project, most of which are hopelessly convoluted, but some of which show simple usage of the `starlink_grpc` functions.
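
The deleted README's install-and-import flow condenses to a quick smoke test. A sketch, assuming the starlink-grpc-core package is still published on PyPI:

```shell
# Install the packaged module and verify it imports
pip install starlink-grpc-core
python3 -c "import starlink_grpc; print(starlink_grpc.__file__)"
```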

@@ -1,10 +0,0 @@
[build-system]
requires = [
    "setuptools>=42",
    "setuptools_scm[toml]>=3.4",
    "wheel"
]
build-backend = "setuptools.build_meta"

[tool.setuptools_scm]
root = ".."

@@ -1,3 +0,0 @@
import setuptools

setuptools.setup()

773 poetry.lock (generated, new file)
@@ -0,0 +1,773 @@
|
||||||
|
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "black"
|
||||||
|
version = "23.7.0"
|
||||||
|
description = "The uncompromising code formatter."
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"},
|
||||||
|
{file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"},
|
||||||
|
{file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"},
|
||||||
|
{file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"},
|
||||||
|
{file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"},
|
||||||
|
{file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"},
|
||||||
|
{file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"},
|
||||||
|
{file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"},
|
||||||
|
{file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"},
|
||||||
|
{file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"},
|
||||||
|
{file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"},
|
||||||
|
{file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"},
|
||||||
|
{file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"},
|
||||||
|
{file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"},
|
||||||
|
{file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"},
|
||||||
|
{file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"},
|
||||||
|
{file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"},
|
||||||
|
{file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"},
|
||||||
|
{file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"},
|
||||||
|
{file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"},
|
||||||
|
{file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"},
|
||||||
|
{file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
click = ">=8.0.0"
|
||||||
|
mypy-extensions = ">=0.4.3"
|
||||||
|
packaging = ">=22.0"
|
||||||
|
pathspec = ">=0.9.0"
|
||||||
|
platformdirs = ">=2"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
colorama = ["colorama (>=0.4.3)"]
|
||||||
|
d = ["aiohttp (>=3.7.4)"]
|
||||||
|
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
||||||
|
uvloop = ["uvloop (>=0.15.2)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2023.7.22"
|
||||||
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
|
||||||
|
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "charset-normalizer"
|
||||||
|
version = "3.2.0"
|
||||||
|
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7.0"
|
||||||
|
files = [
|
||||||
|
{file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
|
||||||
|
{file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
|
||||||
|
{file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click"
|
||||||
|
version = "8.1.7"
|
||||||
|
description = "Composable command line interface toolkit"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
|
||||||
|
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorama"
|
||||||
|
version = "0.4.6"
|
||||||
|
description = "Cross-platform colored terminal text."
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||||
|
files = [
|
||||||
|
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||||
|
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "flake8"
|
||||||
|
version = "6.1.0"
|
||||||
|
description = "the modular source code checker: pep8 pyflakes and co"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8.1"
|
||||||
|
files = [
|
||||||
|
{file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"},
|
||||||
|
{file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
mccabe = ">=0.7.0,<0.8.0"
|
||||||
|
pycodestyle = ">=2.11.0,<2.12.0"
|
||||||
|
pyflakes = ">=3.1.0,<3.2.0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "flake8-pyproject"
|
||||||
|
version = "1.2.3"
|
||||||
|
description = "Flake8 plug-in loading the configuration from pyproject.toml"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">= 3.6"
|
||||||
|
files = [
|
||||||
|
{file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
Flake8 = ">=5"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["pyTest", "pyTest-cov"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "grpcio"
|
||||||
|
version = "1.57.0"
|
||||||
|
description = "HTTP/2-based RPC framework"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-win32.whl", hash = "sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb"},
|
||||||
|
{file = "grpcio-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-win32.whl", hash = "sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe"},
|
||||||
|
{file = "grpcio-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2"},
|
||||||
|
{file = "grpcio-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-win32.whl", hash = "sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb"},
|
||||||
|
{file = "grpcio-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-win32.whl", hash = "sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766"},
|
||||||
|
{file = "grpcio-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056"},
|
||||||
|
{file = "grpcio-1.57.0.tar.gz", hash = "sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
protobuf = ["grpcio-tools (>=1.57.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "grpcio-reflection"
|
||||||
|
version = "1.57.0"
|
||||||
|
description = "Standard Protobuf Reflection Service for gRPC"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "grpcio-reflection-1.57.0.tar.gz", hash = "sha256:8f63a18729cba995a172f8325235f5094cb066febec75f9a3b1b2e28328aa166"},
|
||||||
|
{file = "grpcio_reflection-1.57.0-py3-none-any.whl", hash = "sha256:d7deb8587f9d0095fb5d367c2aa5ce1380e3f23b0f8bca6c00bc404c5429cb6a"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
grpcio = ">=1.57.0"
|
||||||
|
protobuf = ">=4.21.6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "grpcio-tools"
|
||||||
|
version = "1.57.0"
|
||||||
|
description = "Protobuf code generator for gRPC"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "grpcio-tools-1.57.0.tar.gz", hash = "sha256:2f16130d869ce27ecd623194547b649dd657333ec7e8644cc571c645781a9b85"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:4fb8a8468031f858381a576078924af364a08833d8f8f3237018252c4573a802"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:35bf0dad8a3562043345236c26d0053a856fb06c04d7da652f2ded914e508ae7"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:ec9aab2fb6783c7fc54bc28f58eb75f1ca77594e6b0fd5e5e7a8114a95169fe0"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf5fc0a1c23f8ea34b408b72fb0e90eec0f404ad4dba98e8f6da3c9ce34e2ed"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e69d08a515554e0cfe1ec4d31568836f4b17f0ff82294f957f629388629eb9"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c39a3656576b6fdaaf28abe0467f7a7231df4230c1bee132322dbc3209419e7f"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f64f8ab22d27d4a5693310748d35a696061c3b5c7b8c4fb4ab3b4bc1068b6b56"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-win32.whl", hash = "sha256:d2a134756f4db34759a5cc7f7e43f7eb87540b68d1cca62925593c6fb93924f7"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a3d60fb8d46ede26c1907c146561b3a9caa20a7aff961bc661ef8226f85a2e9"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:aac98ecad8f7bd4301855669d42a5d97ef7bb34bec2b1e74c7a0641d47e313cf"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:cdd020cb68b51462983b7c2dfbc3eb6ede032b8bf438d4554df0c3f08ce35c76"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:f54081b08419a39221cd646363b5708857c696b3ad4784f1dcf310891e33a5f7"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed85a0291fff45b67f2557fe7f117d3bc7af8b54b8619d27bf374b5c8b7e3ca2"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e868cd6feb3ef07d4b35be104fe1fd0657db05259ff8f8ec5e08f4f89ca1191d"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dfb6f6120587b8e228a3cae5ee4985b5bdc18501bad05c49df61965dfc9d70a9"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a7ad7f328e28fc97c356d0f10fb10d8b5151bb65aa7cf14bf8084513f0b7306"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-win32.whl", hash = "sha256:9867f2817b1a0c93c523f89ac6c9d8625548af4620a7ce438bf5a76e23327284"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:1f9e917a9f18087f6c14b4d4508fb94fca5c2f96852363a89232fb9b2124ac1f"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:9f2aefa8a37bd2c4db1a3f1aca11377e2766214520fb70e67071f4ff8d8b0fa5"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:850cbda0ec5d24c39e7215ede410276040692ca45d105fbbeada407fa03f0ac0"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6fa52972c9647876ea35f6dc2b51002a74ed900ec7894586cbb2fe76f64f99de"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0eea89d7542719594e50e2283f51a072978b953e8b3e9fd7c59a2c762d4c1"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3da5240211252fc70a6451fe00c143e2ab2f7bfc2445695ad2ed056b8e48d96"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a0256f8786ac9e4db618a1aa492bb3472569a0946fd3ee862ffe23196323da55"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c026bdf5c1366ce88b7bbe2d8207374d675afd3fd911f60752103de3da4a41d2"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9053c2f655589545be08b9d6a673e92970173a4bf11a4b9f18cd6e9af626b587"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:81ec4dbb696e095057b2528d11a8da04be6bbe2b967fa07d4ea9ba6354338cbf"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:495e2946406963e0b9f063f76d5af0f2a19517dac2b367b5b044432ac9194296"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:7b46fc6aa8eb7edd18cafcd21fd98703cb6c09e46b507de335fca7f0161dfccb"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb81ff861692111fa81bd85f64584e624cb4013bd66fbce8a209b8893f5ce398"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a42dc220eb5305f470855c9284f4c8e85ae59d6d742cd07946b0cbe5e9ca186"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90d10d9038ba46a595a223a34f136c9230e3d6d7abc2433dbf0e1c31939d3a8b"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5bc3e6d338aefb052e19cedabe00452be46d0c10a4ed29ee77abb00402e438fe"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-win32.whl", hash = "sha256:34b36217b17b5bea674a414229913e1fd80ede328be51e1b531fcc62abd393b0"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbde4004a0688400036342ff73e3706e8940483e2871547b1354d59e93a38277"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:784574709b9690dc28696617ea69352e2132352fdfc9bc89afa8e39f99ae538e"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:85ac4e62eb44428cde025fd9ab7554002315fc7880f791c553fc5a0015cc9931"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:dc771d4db5701f280957bbcee91745e0686d00ed1c6aa7e05ba30a58b02d70a1"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3ac06703c412f8167a9062eaf6099409967e33bf98fa5b02be4b4689b6bdf39"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02d78c034109f46032c7217260066d49d41e6bcaf588fa28fa40fe2f83445347"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2db25f15ed44327f2e02d0c4fe741ac966f9500e407047d8a7c7fccf2df65616"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b417c97936d94874a3ce7ed8deab910f2233e3612134507cfee4af8735c38a6"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-win32.whl", hash = "sha256:f717cce5093e6b6049d9ea6d12fdf3658efdb1a80772f7737db1f8510b876df6"},
|
||||||
|
{file = "grpcio_tools-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:1c0e8a1a32973a5d59fbcc19232f925e5c48116e9411f788033a31c5ca5130b4"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
grpcio = ">=1.57.0"
|
||||||
|
protobuf = ">=4.21.6,<5.0dev"
|
||||||
|
setuptools = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "idna"
|
||||||
|
version = "3.4"
|
||||||
|
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.5"
|
||||||
|
files = [
|
||||||
|
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||||
|
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "influxdb"
|
||||||
|
version = "5.3.1"
|
||||||
|
description = "InfluxDB client"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
files = [
|
||||||
|
{file = "influxdb-5.3.1-py2.py3-none-any.whl", hash = "sha256:65040a1f53d1a2a4f88a677e89e3a98189a7d30cf2ab61c318aaa89733280747"},
|
||||||
|
{file = "influxdb-5.3.1.tar.gz", hash = "sha256:46f85e7b04ee4b3dee894672be6a295c94709003a7ddea8820deec2ac4d8b27a"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
msgpack = "*"
|
||||||
|
python-dateutil = ">=2.6.0"
|
||||||
|
pytz = "*"
|
||||||
|
requests = ">=2.17.0"
|
||||||
|
six = ">=1.10.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
test = ["mock", "nose", "nose-cov", "requests-mock"]
|
||||||
|
|
||||||
|
[[package]]
name = "influxdb-client"
version = "1.37.0"
description = "InfluxDB 2.0 Python client library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "influxdb_client-1.37.0-py3-none-any.whl", hash = "sha256:30b03888846fab6fb740936b1a08af24841b791cf1b99773e3894dd1d64edf2f"},
    {file = "influxdb_client-1.37.0.tar.gz", hash = "sha256:01ac44d6a16a965ae2e0fa3238e2edeb147c11935a89b61439c9a752458001da"},
]

[package.dependencies]
certifi = ">=14.05.14"
python-dateutil = ">=2.5.3"
reactivex = ">=4.0.4"
setuptools = ">=21.0.0"
urllib3 = ">=1.26.0"

[package.extras]
async = ["aiocsv (>=1.2.2)", "aiohttp (>=3.8.1)"]
ciso = ["ciso8601 (>=2.1.1)"]
extra = ["numpy", "pandas (>=0.25.3)"]
test = ["aioresponses (>=0.7.3)", "coverage (>=4.0.3)", "flake8 (>=5.0.3)", "httpretty (==1.0.5)", "jinja2 (==3.1.2)", "nose (>=1.3.7)", "pluggy (>=0.3.1)", "psutil (>=5.6.3)", "py (>=1.4.31)", "pytest (>=5.0.0)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "randomize (>=0.13)", "sphinx (==1.8.5)", "sphinx-rtd-theme"]

[[package]]
name = "isort"
version = "5.12.0"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.8.0"
files = [
    {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
    {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
]

[package.extras]
colors = ["colorama (>=0.4.3)"]
pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
plugins = ["setuptools"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]

[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "msgpack"
version = "1.0.5"
description = "MessagePack serializer"
category = "main"
optional = false
python-versions = "*"
files = [
    {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"},
    {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"},
    {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"},
    {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"},
    {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"},
    {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"},
    {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"},
    {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"},
    {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"},
    {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"},
    {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"},
    {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"},
    {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"},
    {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"},
    {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"},
    {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"},
    {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"},
    {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"},
    {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"},
    {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"},
    {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"},
    {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"},
    {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"},
    {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"},
    {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"},
    {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"},
    {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"},
    {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"},
    {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"},
    {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"},
    {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"},
    {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"},
    {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"},
    {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"},
    {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"},
    {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"},
    {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"},
    {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"},
    {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"},
    {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"},
    {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"},
    {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"},
    {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"},
    {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"},
    {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"},
    {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"},
    {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"},
    {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"},
    {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"},
    {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"},
    {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"},
    {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"},
    {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"},
    {file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"},
    {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"},
    {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"},
    {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"},
    {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"},
    {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"},
    {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"},
    {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"},
    {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"},
    {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
category = "dev"
optional = false
python-versions = ">=3.5"
files = [
    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]

[[package]]
name = "packaging"
version = "23.1"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
    {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
]

[[package]]
name = "paho-mqtt"
version = "1.6.1"
description = "MQTT version 5.0/3.1.1 client class"
category = "main"
optional = false
python-versions = "*"
files = [
    {file = "paho-mqtt-1.6.1.tar.gz", hash = "sha256:2a8291c81623aec00372b5a85558a372c747cbca8e9934dfe218638b8eefc26f"},
]

[package.extras]
proxy = ["PySocks"]

[[package]]
name = "pathspec"
version = "0.11.2"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
    {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
]

[[package]]
name = "platformdirs"
version = "3.10.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"},
    {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"},
]

[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]

[[package]]
name = "protobuf"
version = "4.24.2"
description = ""
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"},
    {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"},
    {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"},
    {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c"},
    {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74"},
    {file = "protobuf-4.24.2-cp37-cp37m-win32.whl", hash = "sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1"},
    {file = "protobuf-4.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099"},
    {file = "protobuf-4.24.2-cp38-cp38-win32.whl", hash = "sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e"},
    {file = "protobuf-4.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16"},
    {file = "protobuf-4.24.2-cp39-cp39-win32.whl", hash = "sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b"},
    {file = "protobuf-4.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd"},
    {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"},
    {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"},
]
[[package]]
name = "pycodestyle"
version = "2.11.0"
description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"},
    {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"},
]

[[package]]
name = "pyflakes"
version = "3.1.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"},
    {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"},
]

[[package]]
name = "pypng"
version = "0.20220715.0"
description = "Pure Python library for saving and loading PNG images"
category = "main"
optional = false
python-versions = "*"
files = [
    {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"},
    {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"},
]

[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]

[package.dependencies]
six = ">=1.5"

[[package]]
name = "pytz"
version = "2023.3"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
files = [
    {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
    {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
]

[[package]]
name = "reactivex"
version = "4.0.4"
description = "ReactiveX (Rx) for Python"
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
    {file = "reactivex-4.0.4-py3-none-any.whl", hash = "sha256:0004796c420bd9e68aad8e65627d85a8e13f293de76656165dffbcb3a0e3fb6a"},
    {file = "reactivex-4.0.4.tar.gz", hash = "sha256:e912e6591022ab9176df8348a653fe8c8fa7a301f26f9931c9d8c78a650e04e8"},
]

[package.dependencies]
typing-extensions = ">=4.1.1,<5.0.0"
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "setuptools"
version = "68.1.2"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
    {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"},
    {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"},
]

[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]

[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]

[[package]]
name = "typing-extensions"
version = "4.7.1"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
    {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
]

[[package]]
name = "urllib3"
version = "2.0.4"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"},
    {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"},
]

[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "yagrc"
version = "1.1.2"
description = "Yet another gRPC reflection client"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
    {file = "yagrc-1.1.2-py3-none-any.whl", hash = "sha256:ef7fc6da021c10f3cac2ea21147931bca97e90341645d28edd6c90b68b0ef3f5"},
    {file = "yagrc-1.1.2.tar.gz", hash = "sha256:ef264bf98bfbc8f9932dca85048838e6f541db87de4937610ea15e035afe1251"},
]

[package.dependencies]
grpcio = ">=1.12.0"
grpcio-reflection = ">=1.7.3"
protobuf = ">=4.22.0"

[package.extras]
test = ["pytest", "pytest-grpc"]

[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "d0558513b225264653cde8153abe3425004ee3298c1407cbb2dc672fe356b2f3"
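The lock file above pins an exact version and artifact hashes for every dependency, and the [metadata] content-hash ties it back to pyproject.toml. As a minimal sketch (not part of this changeset), the pins can be read with nothing but the standard library, assuming Python 3.11+ (for tomllib) and a poetry.lock in the working directory:

import tomllib

with open("poetry.lock", "rb") as f:
    lock = tomllib.load(f)

for pkg in lock["package"]:
    # Each [[package]] table carries at least "name" and "version".
    print(f'{pkg["name"]}=={pkg["version"]}')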

45
pyproject.toml
Normal file

@@ -0,0 +1,45 @@
[tool.poetry]
name = "starlink-grpc-tools"
version = "0.1.0"
description = ""
authors = ["Jeffrey C. Ollie <jeff@ocjtech.us>"]
readme = "README.md"
packages = [{ include = "starlink_grpc_tools" }]

[tool.poetry.dependencies]
python = "^3.11"
grpcio = "^1.57.0"
grpcio-tools = "^1.57.0"
protobuf = "^4.24.2"
yagrc = "*"
paho-mqtt = "^1.6.1"
influxdb = "^5.3.1"
influxdb-client = "^1.37.0"
pypng = "^0.20220715.0"
typing-extensions = "^4.7.1"

[tool.poetry.group.dev.dependencies]
black = "^23.7.0"
flake8 = "^6.1.0"
flake8-pyproject = "^1.2.3"
isort = "^5.12.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.black]

[tool.isort]
profile = "black"
line_length = 88
force_single_line = true
force_sort_within_sections = true
from_first = false

[tool.flake8]
max-line-length = 120
extend-ignore = "E203"

[tool.poetry.scripts]
dish_grpc_prometheus = "starlink_grpc_tools.dish_grpc_prometheus:main"
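The [tool.poetry.scripts] table turns dish_grpc_prometheus into an installed console script. A hedged sketch (not from this repo) of how such an entry point resolves at runtime via the standard library, assuming the package has been installed:

from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):
    if ep.name == "dish_grpc_prometheus":
        main = ep.load()  # resolves starlink_grpc_tools.dish_grpc_prometheus:main
        # main() would start the exporter; deliberately not called here.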

@@ -1,9 +0,0 @@
-grpcio>=1.12.0
-grpcio-tools>=1.20.0
-protobuf>=3.6.0
-yagrc>=1.1.1
-paho-mqtt>=1.5.1
-influxdb>=5.3.1
-influxdb_client>=1.23.0
-pypng>=0.0.20
-typing-extensions>=4.3.0

0
starlink_grpc_tools/__init__.py
Normal file

@@ -20,13 +20,17 @@ from typing import List
 
 import grpc
 
-import starlink_grpc
+import starlink_grpc_tools.starlink_grpc as starlink_grpc
 
 BRACKETS_RE = re.compile(r"([^[]*)(\[((\d+),|)(\d*)\]|)$")
 LOOP_TIME_DEFAULT = 0
 STATUS_MODES: List[str] = ["status", "obstruction_detail", "alert_detail", "location"]
 HISTORY_STATS_MODES: List[str] = [
-    "ping_drop", "ping_run_length", "ping_latency", "ping_loaded_latency", "usage"
+    "ping_drop",
+    "ping_run_length",
+    "ping_latency",
+    "ping_loaded_latency",
+    "usage",
 ]
 UNGROUPED_MODES: List[str] = []
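BRACKETS_RE, unchanged in the hunk above, parses an optional [index] or [start,end] suffix on stat names. A short, self-contained illustration (the sample names are chosen for the example, not taken from the scripts):

import re

BRACKETS_RE = re.compile(r"([^[]*)(\[((\d+),|)(\d*)\]|)$")

for text in ("snr", "ping_drop[5]", "usage[2,8]", "runs[3,]"):
    m = BRACKETS_RE.match(text)
    # group 1: bare name; group 4: start index; group 5: end index
    # (groups 4/5 are None or empty when the suffix is absent or open-ended)
    print(text, "->", m.group(1), m.group(4), m.group(5))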
@@ -34,60 +38,79 @@ UNGROUPED_MODES: List[str] = []
 def create_arg_parser(output_description, bulk_history=True):
     """Create an argparse parser and add the common command line options."""
     parser = argparse.ArgumentParser(
-        description="Collect status and/or history data from a Starlink user terminal and " +
-        output_description,
+        description="Collect status and/or history data from a Starlink user terminal and "
+        + output_description,
         epilog="Additional arguments can be read from a file by including @FILENAME as an "
         "option, where FILENAME is a path to a file that contains arguments, one per line.",
         fromfile_prefix_chars="@",
-        add_help=False)
+        add_help=False,
+    )
 
     # need to remember this for later
     parser.bulk_history = bulk_history
 
     group = parser.add_argument_group(title="General options")
-    group.add_argument("-g",
-        "--target",
-        help="host:port of dish to query, default is the standard IP address "
-        "and port (192.168.100.1:9200)")
+    group.add_argument(
+        "-g",
+        "--target",
+        help="host:port of dish to query, default is the standard IP address "
+        "and port (192.168.100.1:9200)",
+    )
     group.add_argument("-h", "--help", action="help", help="Be helpful")
-    group.add_argument("-N",
-        "--numeric",
-        action="store_true",
-        help="Record boolean values as 1 and 0 instead of True and False")
-    group.add_argument("-t",
-        "--loop-interval",
-        type=float,
-        default=float(LOOP_TIME_DEFAULT),
-        help="Loop interval in seconds or 0 for no loop, default: " +
-        str(LOOP_TIME_DEFAULT))
+    group.add_argument(
+        "-N",
+        "--numeric",
+        action="store_true",
+        help="Record boolean values as 1 and 0 instead of True and False",
+    )
+    group.add_argument(
+        "-t",
+        "--loop-interval",
+        type=float,
+        default=float(LOOP_TIME_DEFAULT),
+        help="Loop interval in seconds or 0 for no loop, default: "
+        + str(LOOP_TIME_DEFAULT),
+    )
     group.add_argument("-v", "--verbose", action="store_true", help="Be verbose")
 
     group = parser.add_argument_group(title="History mode options")
-    group.add_argument("-a",
-        "--all-samples",
-        action="store_const",
-        const=-1,
-        dest="samples",
-        help="Parse all valid samples")
-    group.add_argument("-o",
-        "--poll-loops",
-        type=int,
-        help="Poll history for N loops and aggregate data before computing history "
-        "stats; this allows for a smaller loop interval with less loss of data "
-        "when the dish reboots",
-        metavar="N")
+    group.add_argument(
+        "-a",
+        "--all-samples",
+        action="store_const",
+        const=-1,
+        dest="samples",
+        help="Parse all valid samples",
+    )
+    group.add_argument(
+        "-o",
+        "--poll-loops",
+        type=int,
+        help="Poll history for N loops and aggregate data before computing history "
+        "stats; this allows for a smaller loop interval with less loss of data "
+        "when the dish reboots",
+        metavar="N",
+    )
     if bulk_history:
-        sample_help = ("Number of data samples to parse; normally applies to first loop "
-            "iteration only, default: all in bulk mode, loop interval if loop "
-            "interval set, else all available samples")
-        no_counter_help = ("Don't track sample counter across loop iterations in non-bulk "
-            "modes; keep using samples option value instead")
+        sample_help = (
+            "Number of data samples to parse; normally applies to first loop "
+            "iteration only, default: all in bulk mode, loop interval if loop "
+            "interval set, else all available samples"
+        )
+        no_counter_help = (
+            "Don't track sample counter across loop iterations in non-bulk "
+            "modes; keep using samples option value instead"
+        )
     else:
-        sample_help = ("Number of data samples to parse; normally applies to first loop "
-            "iteration only, default: loop interval, if set, else all available " +
-            "samples")
-        no_counter_help = ("Don't track sample counter across loop iterations; keep using "
-            "samples option value instead")
+        sample_help = (
+            "Number of data samples to parse; normally applies to first loop "
+            "iteration only, default: loop interval, if set, else all available "
+            + "samples"
+        )
+        no_counter_help = (
+            "Don't track sample counter across loop iterations; keep using "
+            "samples option value instead"
+        )
     group.add_argument("-s", "--samples", type=int, help=sample_help)
     group.add_argument("-j", "--no-counter", action="store_true", help=no_counter_help)
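The fromfile_prefix_chars="@" setting kept in the hunk above is what the epilog refers to: arguments can live in a file, one per line. A minimal, self-contained sketch of the mechanism (option names are illustrative):

import argparse

parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
parser.add_argument("-t", "--loop-interval", type=float, default=0.0)
parser.add_argument("mode", nargs="+")

# With a file opts.txt containing the three lines "-t", "5", and "status",
# these two calls parse identically:
#   parser.parse_args(["@opts.txt"])
#   parser.parse_args(["-t", "5", "status"])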
@@ -114,11 +137,13 @@ def run_arg_parser(parser, need_id=False, no_stdout_errors=False, modes=None):
         modes = STATUS_MODES + HISTORY_STATS_MODES + UNGROUPED_MODES
     if parser.bulk_history:
         modes.append("bulk_history")
-    parser.add_argument("mode",
-        nargs="+",
-        choices=modes,
-        help="The data group to record, one or more of: " + ", ".join(modes),
-        metavar="mode")
+    parser.add_argument(
+        "mode",
+        nargs="+",
+        choices=modes,
+        help="The data group to record, one or more of: " + ", ".join(modes),
+        metavar="mode",
+    )
 
     opts = parser.parse_args()
@@ -163,6 +188,7 @@ def conn_error(opts, msg, *args):
 
 class GlobalState:
     """A class for keeping state across loop iterations."""
+
     def __init__(self, target=None):
         # counter, timestamp for bulk_history:
         self.counter = None
@@ -227,7 +253,9 @@ def get_data(opts, gstate, add_item, add_sequence, add_bulk=None, flush_history=
     rc, status_ts = get_status_data(opts, gstate, add_item, add_sequence)
 
     if opts.history_stats_mode and (not rc or opts.poll_loops > 1):
-        hist_rc, hist_ts = get_history_stats(opts, gstate, add_item, add_sequence, flush_history)
+        hist_rc, hist_ts = get_history_stats(
+            opts, gstate, add_item, add_sequence, flush_history
+        )
         if not rc:
             rc = hist_rc
@@ -252,10 +280,12 @@ def add_data_numeric(data, category, add_item, add_sequence):
         if seq is None:
             add_item(name, int(val) if isinstance(val, int) else val, category)
         else:
-            add_sequence(name,
-                [int(subval) if isinstance(subval, int) else subval for subval in val],
-                category,
-                int(start) if start else 0)
+            add_sequence(
+                name,
+                [int(subval) if isinstance(subval, int) else subval for subval in val],
+                category,
+                int(start) if start else 0,
+            )
 
 
 def get_status_data(opts, gstate, add_item, add_sequence):
@@ -269,7 +299,10 @@ def get_status_data(opts, gstate, add_item, add_sequence):
     except starlink_grpc.GrpcError as e:
         if "status" in opts.mode:
             if opts.need_id and gstate.dish_id is None:
-                conn_error(opts, "Dish unreachable and ID unknown, so not recording state")
+                conn_error(
+                    opts,
+                    "Dish unreachable and ID unknown, so not recording state",
+                )
                 return 1, None
             if opts.verbose:
                 print("Dish unreachable")
@@ -293,7 +326,9 @@
             conn_error(opts, "Failure getting location: %s", str(e))
             return 1, None
         if location["latitude"] is None and gstate.warn_once_location:
-            logging.warning("Location data not enabled. See README for more details.")
+            logging.warning(
+                "Location data not enabled. See README for more details."
+            )
             gstate.warn_once_location = False
         add_data(location, "status", add_item, add_sequence)
     return 0, timestamp
@@ -318,8 +353,10 @@ def get_history_stats(opts, gstate, add_item, add_sequence, flush_history):
             timestamp = int(time.time())
             history = starlink_grpc.get_history(context=gstate.context)
             gstate.timestamp_stats = timestamp
-        except grpc.RpcError as e:
-            conn_error(opts, "Failure getting history: %s", str(starlink_grpc.GrpcError(e)))
+        except (AttributeError, ValueError, grpc.RpcError) as e:
+            conn_error(
+                opts, "Failure getting history: %s", str(starlink_grpc.GrpcError(e))
+            )
             history = None
 
     parse_samples = opts.samples if gstate.counter_stats is None else -1
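The broadened except clause matches the error mapping that dish_control.py introduces later in this changeset: reflection-generated clients can surface protocol mismatches as AttributeError or ValueError rather than grpc.RpcError. A hedged restatement of that classification as a standalone helper (not itself part of the diff):

import grpc

def classify(exc):
    # Mirrors the handling used elsewhere in this changeset.
    if isinstance(exc, grpc.Call):
        return exc.details()
    if isinstance(exc, (AttributeError, ValueError)):
        return "Protocol error"
    return "Unknown communication or service error"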
@@ -329,11 +366,13 @@ def get_history_stats(opts, gstate, add_item, add_sequence, flush_history):
     # was a dish reboot.
     if gstate.accum_history:
         if history is not None:
-            gstate.accum_history = starlink_grpc.concatenate_history(gstate.accum_history,
-                history,
-                samples1=parse_samples,
-                start1=start,
-                verbose=opts.verbose)
+            gstate.accum_history = starlink_grpc.concatenate_history(
+                gstate.accum_history,
+                history,
+                samples1=parse_samples,
+                start1=start,
+                verbose=opts.verbose,
+            )
             # Counter tracking gets too complicated to handle across reboots
             # once the data has been accumulated, so just have concatenate
             # handle it on the first polled loop and use a value of 0 to
@@ -354,7 +393,9 @@ def get_history_stats(opts, gstate, add_item, add_sequence, flush_history):
         new_samples = gstate.accum_history.current
         if new_samples > len(gstate.accum_history.pop_ping_drop_rate):
            new_samples = len(gstate.accum_history.pop_ping_drop_rate)
-        gstate.poll_count = max(gstate.poll_count, int((new_samples-1) / opts.loop_interval))
+        gstate.poll_count = max(
+            gstate.poll_count, int((new_samples - 1) / opts.loop_interval)
+        )
         gstate.first_poll = False
 
     if gstate.poll_count < opts.poll_loops - 1 and not flush_history:
@@ -366,10 +407,9 @@ def get_history_stats(opts, gstate, add_item, add_sequence, flush_history):
     if gstate.accum_history is None:
         return (0, None) if flush_history else (1, None)
 
-    groups = starlink_grpc.history_stats(parse_samples,
-        start=start,
-        verbose=opts.verbose,
-        history=gstate.accum_history)
+    groups = starlink_grpc.history_stats(
+        parse_samples, start=start, verbose=opts.verbose, history=gstate.accum_history
+    )
     general, ping, runlen, latency, loaded, usage = groups[0:6]
     add_data = add_data_numeric if opts.numeric else add_data_normal
     add_data(general, "ping_stats", add_item, add_sequence)
@@ -400,10 +440,9 @@ def get_bulk_data(opts, gstate, add_bulk):
     start = gstate.counter
     parse_samples = opts.bulk_samples if start is None else -1
     try:
-        general, bulk = starlink_grpc.history_bulk_data(parse_samples,
-            start=start,
-            verbose=opts.verbose,
-            context=gstate.context)
+        general, bulk = starlink_grpc.history_bulk_data(
+            parse_samples, start=start, verbose=opts.verbose, context=gstate.context
+        )
     except starlink_grpc.GrpcError as e:
         conn_error(opts, "Failure getting history: %s", str(e))
         return 1
@@ -417,16 +456,26 @@ def get_bulk_data(opts, gstate, add_bulk):
         timestamp = None
     # Allow up to 2 seconds of time drift before forcibly re-syncing, since
     # +/- 1 second can happen just due to scheduler timing.
-    if timestamp is not None and not before - 2.0 <= timestamp + parsed_samples <= after + 2.0:
+    if (
+        timestamp is not None
+        and not before - 2.0 <= timestamp + parsed_samples <= after + 2.0
+    ):
         if opts.verbose:
-            print("Lost sample time sync at: " +
-                str(datetime.fromtimestamp(timestamp + parsed_samples, tz=timezone.utc)))
+            print(
+                "Lost sample time sync at: "
+                + str(
+                    datetime.fromtimestamp(timestamp + parsed_samples, tz=timezone.utc)
+                )
+            )
         timestamp = None
     if timestamp is None:
         timestamp = int(before)
         if opts.verbose:
-            print("Establishing new time base: {0} -> {1}".format(
-                new_counter, datetime.fromtimestamp(timestamp, tz=timezone.utc)))
+            print(
+                "Establishing new time base: {0} -> {1}".format(
+                    new_counter, datetime.fromtimestamp(timestamp, tz=timezone.utc)
+                )
+            )
         timestamp -= parsed_samples
 
     if opts.numeric:
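The reformatted condition above keeps the running time base only while it stays within two seconds of the wall clock, at one history sample per second. A hedged restatement as a standalone predicate (names are illustrative):

def still_synced(timestamp, parsed_samples, before, after, slack=2.0):
    # One history sample per second: predict where the counter should land
    # and accept it only inside the [before - slack, after + slack] window.
    predicted = timestamp + parsed_samples
    return before - slack <= predicted <= after + slack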
@@ -434,7 +483,11 @@ def get_bulk_data(opts, gstate, add_bulk):
             {
                 k: [int(subv) if isinstance(subv, int) else subv for subv in v]
                 for k, v in bulk.items()
-            }, parsed_samples, timestamp, new_counter - parsed_samples)
+            },
+            parsed_samples,
+            timestamp,
+            new_counter - parsed_samples,
+        )
     else:
         add_bulk(bulk, parsed_samples, timestamp, new_counter - parsed_samples)

119
starlink_grpc_tools/dish_control.py
Normal file

@@ -0,0 +1,119 @@
#!/usr/bin/python3
"""Manipulate operating state of a Starlink user terminal."""

import argparse
import logging
import sys

import grpc
from yagrc import reflector as yagrc_reflector


def parse_args():
    parser = argparse.ArgumentParser(description="Starlink user terminal state control")
    parser.add_argument(
        "-e",
        "--target",
        default="192.168.100.1:9200",
        help="host:port of dish to query, default is the standard IP address "
        "and port (192.168.100.1:9200)",
    )
    subs = parser.add_subparsers(dest="command", required=True)
    subs.add_parser("reboot", help="Reboot the user terminal")
    subs.add_parser("stow", help="Set user terminal to stow position")
    subs.add_parser("unstow", help="Restore user terminal from stow position")
    sleep_parser = subs.add_parser(
        "set_sleep",
        help="Show, set, or disable power save configuration",
        description="Run without arguments to show current configuration",
    )
    sleep_parser.add_argument(
        "start", nargs="?", type=int, help="Start time in minutes past midnight UTC"
    )
    sleep_parser.add_argument(
        "duration", nargs="?", type=int, help="Duration in minutes, or 0 to disable"
    )

    opts = parser.parse_args()
    if opts.command == "set_sleep" and opts.start is not None:
        if opts.duration is None:
            sleep_parser.error("Must specify duration if start time is specified")
        if opts.start < 0 or opts.start >= 1440:
            sleep_parser.error("Invalid start time, must be >= 0 and < 1440")
        if opts.duration < 0 or opts.duration > 1440:
            sleep_parser.error("Invalid duration, must be >= 0 and <= 1440")
    return opts


def main():
    opts = parse_args()

    logging.basicConfig(format="%(levelname)s: %(message)s")

    reflector = yagrc_reflector.GrpcReflectionClient()
    try:
        with grpc.insecure_channel(opts.target) as channel:
            reflector.load_protocols(channel, symbols=["SpaceX.API.Device.Device"])
            stub = reflector.service_stub_class("SpaceX.API.Device.Device")(channel)
            request_class = reflector.message_class("SpaceX.API.Device.Request")
            if opts.command == "reboot":
                request = request_class(reboot={})
            elif opts.command == "stow":
                request = request_class(dish_stow={})
            elif opts.command == "unstow":
                request = request_class(dish_stow={"unstow": True})
            else:  # set_sleep
                if opts.start is None and opts.duration is None:
                    request = request_class(dish_get_config={})
                else:
                    if opts.duration:
                        request = request_class(
                            dish_power_save={
                                "power_save_start_minutes": opts.start,
                                "power_save_duration_minutes": opts.duration,
                                "enable_power_save": True,
                            }
                        )
                    else:
                        # duration of 0 not allowed, even when disabled
                        request = request_class(
                            dish_power_save={
                                "power_save_duration_minutes": 1,
                                "enable_power_save": False,
                            }
                        )

            response = stub.Handle(request, timeout=10)

            if (
                opts.command == "set_sleep"
                and opts.start is None
                and opts.duration is None
            ):
                config = response.dish_get_config.dish_config
                if config.power_save_mode:
                    print(
                        "Sleep start:",
                        config.power_save_start_minutes,
                        "minutes past midnight UTC",
                    )
                    print(
                        "Sleep duration:", config.power_save_duration_minutes, "minutes"
                    )
                else:
                    print("Sleep disabled")
    except (AttributeError, ValueError, grpc.RpcError) as e:
        if isinstance(e, grpc.Call):
            msg = e.details()
        elif isinstance(e, (AttributeError, ValueError)):
            msg = "Protocol error"
        else:
            msg = "Unknown communication or service error"
        logging.error(msg)
        sys.exit(1)

    sys.exit(0)


if __name__ == "__main__":
    main()
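set_sleep takes its start argument as minutes past midnight UTC (validated to 0–1439) and its duration in minutes (up to 1440). A hedged helper, not part of the repo, for computing those values:

def minutes_past_midnight_utc(hour_utc, minute_utc=0):
    # Result lies in [0, 1440), the range parse_args() validates.
    return hour_utc * 60 + minute_utc

# e.g. a sleep window starting 02:30 UTC and lasting 6 hours:
#   python -m starlink_grpc_tools.dish_control set_sleep 150 360
start = minutes_past_midnight_utc(2, 30)  # 150
duration = 6 * 60                         # 360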
@@ -31,7 +31,7 @@ import warnings
 
 from influxdb import InfluxDBClient
 
-import dish_common
+import starlink_grpc_tools.dish_common as dish_common
 
 HOST_DEFAULT = "localhost"
 DATABASE_DEFAULT = "starlinkstats"
@@ -52,41 +52,58 @@ def handle_sigterm(signum, frame):
 
 def parse_args():
     parser = dish_common.create_arg_parser(
-        output_description="write it to an InfluxDB 1.x database")
+        output_description="write it to an InfluxDB 1.x database"
+    )
 
     group = parser.add_argument_group(title="InfluxDB 1.x database options")
-    group.add_argument("-n",
-        "--hostname",
-        default=HOST_DEFAULT,
-        dest="host",
-        help="Hostname of InfluxDB server, default: " + HOST_DEFAULT)
-    group.add_argument("-p", "--port", type=int, help="Port number to use on InfluxDB server")
-    group.add_argument("-P", "--password", help="Set password for username/password authentication")
+    group.add_argument(
+        "-n",
+        "--hostname",
+        default=HOST_DEFAULT,
+        dest="host",
+        help="Hostname of InfluxDB server, default: " + HOST_DEFAULT,
+    )
+    group.add_argument(
+        "-p", "--port", type=int, help="Port number to use on InfluxDB server"
+    )
+    group.add_argument(
+        "-P", "--password", help="Set password for username/password authentication"
+    )
     group.add_argument("-U", "--username", help="Set username for authentication")
-    group.add_argument("-D",
-        "--database",
-        default=DATABASE_DEFAULT,
-        help="Database name to use, default: " + DATABASE_DEFAULT)
+    group.add_argument(
+        "-D",
+        "--database",
+        default=DATABASE_DEFAULT,
+        help="Database name to use, default: " + DATABASE_DEFAULT,
+    )
     group.add_argument("-R", "--retention-policy", help="Retention policy name to use")
-    group.add_argument("-k",
-        "--skip-query",
-        action="store_true",
-        help="Skip querying for prior sample write point in bulk mode")
-    group.add_argument("-C",
-        "--ca-cert",
-        dest="verify_ssl",
-        help="Enable SSL/TLS using specified CA cert to verify server",
-        metavar="FILENAME")
-    group.add_argument("-I",
-        "--insecure",
-        action="store_false",
-        dest="verify_ssl",
-        help="Enable SSL/TLS but disable certificate verification (INSECURE!)")
-    group.add_argument("-S",
-        "--secure",
-        action="store_true",
-        dest="verify_ssl",
-        help="Enable SSL/TLS using default CA cert")
+    group.add_argument(
+        "-k",
+        "--skip-query",
+        action="store_true",
+        help="Skip querying for prior sample write point in bulk mode",
+    )
+    group.add_argument(
+        "-C",
+        "--ca-cert",
+        dest="verify_ssl",
+        help="Enable SSL/TLS using specified CA cert to verify server",
+        metavar="FILENAME",
+    )
+    group.add_argument(
+        "-I",
+        "--insecure",
+        action="store_false",
+        dest="verify_ssl",
+        help="Enable SSL/TLS but disable certificate verification (INSECURE!)",
+    )
+    group.add_argument(
+        "-S",
+        "--secure",
+        action="store_true",
+        dest="verify_ssl",
+        help="Enable SSL/TLS using default CA cert",
+    )
 
     env_map = (
         ("INFLUXDB_HOST", "host"),
@@ -130,16 +147,20 @@ def parse_args():
 def flush_points(opts, gstate):
     try:
         while len(gstate.points) > MAX_BATCH:
-            gstate.influx_client.write_points(gstate.points[:MAX_BATCH],
-                time_precision="s",
-                retention_policy=opts.retention_policy)
+            gstate.influx_client.write_points(
+                gstate.points[:MAX_BATCH],
+                time_precision="s",
+                retention_policy=opts.retention_policy,
+            )
             if opts.verbose:
                 print("Data points written: " + str(MAX_BATCH))
             del gstate.points[:MAX_BATCH]
         if gstate.points:
-            gstate.influx_client.write_points(gstate.points,
-                time_precision="s",
-                retention_policy=opts.retention_policy)
+            gstate.influx_client.write_points(
+                gstate.points,
+                time_precision="s",
+                retention_policy=opts.retention_policy,
+            )
             if opts.verbose:
                 print("Data points written: " + str(len(gstate.points)))
             gstate.points.clear()
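flush_points writes in MAX_BATCH-sized chunks and trims the buffer only after each successful write, so a failure leaves the unwritten points queued for the next pass. The same pattern in isolation (the MAX_BATCH value here is illustrative; the real constant is defined elsewhere in the script):

MAX_BATCH = 5000  # illustrative value

def flush(points, write):
    while len(points) > MAX_BATCH:
        write(points[:MAX_BATCH])
        del points[:MAX_BATCH]  # trim only after the write succeeded
    if points:
        write(points)
        points.clear()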
@@ -159,12 +180,13 @@ def flush_points(opts, gstate):
 def query_counter(gstate, start, end):
     try:
         # fetch the latest point where counter field was recorded
-        result = gstate.influx_client.query("SELECT counter FROM \"{0}\" "
-            "WHERE time>={1}s AND time<{2}s AND id=$id "
-            "ORDER by time DESC LIMIT 1;".format(
-            BULK_MEASUREMENT, start, end),
+        result = gstate.influx_client.query(
+            'SELECT counter FROM "{0}" '
+            "WHERE time>={1}s AND time<{2}s AND id=$id "
+            "ORDER by time DESC LIMIT 1;".format(BULK_MEASUREMENT, start, end),
             bind_params={"id": gstate.dish_id},
-            epoch="s")
+            epoch="s",
+        )
         points = list(result.get_points())
         if points:
             counter = points[0].get("counter", None)
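The rewritten query splits its parameters deliberately: the measurement name and time bounds are interpolated with str.format, while the dish id travels through bind_params so the server binds $id itself. A hedged sketch with placeholder values (the measurement and id shown are not the real ones):

query = (
    'SELECT counter FROM "{0}" '
    "WHERE time>={1}s AND time<{2}s AND id=$id "
    "ORDER by time DESC LIMIT 1;".format("example_measurement", 1693000000, 1693003600)
)
params = {"id": "example-dish-id"}
# influx_client.query(query, bind_params=params, epoch="s")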
@@ -177,22 +199,28 @@ def query_counter(gstate, start, end):
         # query(), so just skip this functionality.
         logging.error(
             "Failed running query, probably due to influxdb-python version too old. "
-            "Skipping resumption from prior counter value. Reported error was: %s", str(e))
+            "Skipping resumption from prior counter value. Reported error was: %s",
+            str(e),
+        )
 
     return None, 0
 
 
 def sync_timebase(opts, gstate):
     try:
-        db_counter, db_timestamp = query_counter(gstate, gstate.start_timestamp, gstate.timestamp)
+        db_counter, db_timestamp = query_counter(
+            gstate, gstate.start_timestamp, gstate.timestamp
+        )
     except Exception as e:
         # could be temporary outage, so try again next time
-        dish_common.conn_error(opts, "Failed querying InfluxDB for prior count: %s", str(e))
+        dish_common.conn_error(
+            opts, "Failed querying InfluxDB for prior count: %s", str(e)
+        )
         return
     gstate.timebase_synced = True
 
     if db_counter and gstate.start_counter <= db_counter:
-        del gstate.deferred_points[:db_counter - gstate.start_counter]
+        del gstate.deferred_points[: db_counter - gstate.start_counter]
         if gstate.deferred_points:
             delta_timestamp = db_timestamp - (gstate.deferred_points[0]["time"] - 1)
             # to prevent +/- 1 second timestamp drift when the script restarts,
@ -203,8 +231,12 @@ def sync_timebase(opts, gstate):
|
||||||
print("Exactly synced with database time base")
|
print("Exactly synced with database time base")
|
||||||
elif -2 <= delta_timestamp <= 2:
|
elif -2 <= delta_timestamp <= 2:
|
||||||
if opts.verbose:
|
if opts.verbose:
|
||||||
print("Replacing with existing time base: {0} -> {1}".format(
|
print(
|
||||||
db_counter, datetime.fromtimestamp(db_timestamp, tz=timezone.utc)))
|
"Replacing with existing time base: {0} -> {1}".format(
|
||||||
|
db_counter,
|
||||||
|
datetime.fromtimestamp(db_timestamp, tz=timezone.utc),
|
||||||
|
)
|
||||||
|
)
|
||||||
for point in gstate.deferred_points:
|
for point in gstate.deferred_points:
|
||||||
db_timestamp += 1
|
db_timestamp += 1
|
||||||
if point["time"] + delta_timestamp == db_timestamp:
|
if point["time"] + delta_timestamp == db_timestamp:
|
||||||
|
@ -216,7 +248,11 @@ def sync_timebase(opts, gstate):
|
||||||
gstate.timestamp = db_timestamp
|
gstate.timestamp = db_timestamp
|
||||||
else:
|
else:
|
||||||
if opts.verbose:
|
if opts.verbose:
|
||||||
print("Database time base out of sync by {0} seconds".format(delta_timestamp))
|
print(
|
||||||
|
"Database time base out of sync by {0} seconds".format(
|
||||||
|
delta_timestamp
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
gstate.points.extend(gstate.deferred_points)
|
gstate.points.extend(gstate.deferred_points)
|
||||||
gstate.deferred_points.clear()
|
gstate.deferred_points.clear()
|
||||||
|
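
The sync_timebase logic re-indented above rebases locally queued history samples onto the timestamp of the last counter value found in the database. A simplified sketch of that idea (names are illustrative and the real function handles more cases, such as larger clock skew):

    # Rebase deferred samples onto the database clock when the skew is small.
    def rebase(deferred_points, db_counter, db_timestamp, start_counter):
        # Drop samples the database has already seen (assumes db_counter is at
        # or past start_counter, as checked by the caller).
        del deferred_points[: db_counter - start_counter]
        if not deferred_points:
            return
        delta = db_timestamp - (deferred_points[0]["time"] - 1)
        if -2 <= delta <= 2:  # within tolerance: adopt the database time base
            for point in deferred_points:
                db_timestamp += 1
                point["time"] = db_timestamp
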
@@ -239,38 +275,42 @@ def loop_body(opts, gstate, shutdown=False):
         points = gstate.points if gstate.timebase_synced else gstate.deferred_points
         for i in range(count):
             timestamp += 1
-            points.append({
-                "measurement": BULK_MEASUREMENT,
-                "tags": {
-                    "id": gstate.dish_id
-                },
-                "time": timestamp,
-                "fields": {key: val[i] for key, val in bulk.items() if val[i] is not None},
-            })
+            points.append(
+                {
+                    "measurement": BULK_MEASUREMENT,
+                    "tags": {"id": gstate.dish_id},
+                    "time": timestamp,
+                    "fields": {
+                        key: val[i] for key, val in bulk.items() if val[i] is not None
+                    },
+                }
+            )
         if points:
             # save off counter value for script restart
             points[-1]["fields"]["counter"] = counter + count
 
-    rc, status_ts, hist_ts = dish_common.get_data(opts,
-                                                  gstate,
-                                                  cb_add_item,
-                                                  cb_add_sequence,
-                                                  add_bulk=cb_add_bulk,
-                                                  flush_history=shutdown)
+    rc, status_ts, hist_ts = dish_common.get_data(
+        opts,
+        gstate,
+        cb_add_item,
+        cb_add_sequence,
+        add_bulk=cb_add_bulk,
+        flush_history=shutdown,
+    )
     if rc:
         return rc
 
     for category, cat_fields in fields.items():
         if cat_fields:
             timestamp = status_ts if category == "status" else hist_ts
-            gstate.points.append({
-                "measurement": "spacex.starlink.user_terminal." + category,
-                "tags": {
-                    "id": gstate.dish_id
-                },
-                "time": timestamp,
-                "fields": cat_fields,
-            })
+            gstate.points.append(
+                {
+                    "measurement": "spacex.starlink.user_terminal." + category,
+                    "tags": {"id": gstate.dish_id},
+                    "time": timestamp,
+                    "fields": cat_fields,
+                }
+            )
 
     # This is here and not before the points being processed because if the
     # query previously failed, there will be points that were processed in
@@ -306,7 +346,9 @@ def main():
     signal.signal(signal.SIGTERM, handle_sigterm)
     try:
         # attempt to hack around breakage between influxdb-python client and 2.0 server:
-        gstate.influx_client = InfluxDBClient(**opts.icargs, headers={"Accept": "application/json"})
+        gstate.influx_client = InfluxDBClient(
+            **opts.icargs, headers={"Accept": "application/json"}
+        )
     except TypeError:
         # ...unless influxdb-python package version is too old
         gstate.influx_client = InfluxDBClient(**opts.icargs)
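
The dictionaries built in loop_body follow the point format influxdb-python expects; a minimal sketch of one such point (the tag and field values here are invented examples):

    point = {
        "measurement": "spacex.starlink.user_terminal.status",
        "tags": {"id": "ut01234567"},  # hypothetical dish id
        "time": 1668000000,  # seconds, matching time_precision="s"
        "fields": {"state": "CONNECTED", "pop_ping_latency_ms": 35.5},
    }
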
dish_grpc_influx2.py
@@ -29,9 +29,11 @@ import sys
 import time
 import warnings
 
-from influxdb_client import InfluxDBClient, WriteOptions, WritePrecision
+from influxdb_client import InfluxDBClient
+from influxdb_client import WriteOptions
+from influxdb_client import WritePrecision
 
-import dish_common
+import starlink_grpc_tools.dish_common as dish_common
 
 URL_DEFAULT = "http://localhost:8086"
 BUCKET_DEFAULT = "starlinkstats"
@@ -52,34 +54,45 @@ def handle_sigterm(signum, frame):
 
 def parse_args():
     parser = dish_common.create_arg_parser(
-        output_description="write it to an InfluxDB 2.x database")
+        output_description="write it to an InfluxDB 2.x database"
+    )
 
     group = parser.add_argument_group(title="InfluxDB 2.x database options")
-    group.add_argument("-u",
-                       "--url",
-                       default=URL_DEFAULT,
-                       dest="url",
-                       help="URL of the InfluxDB 2.x server, default: " + URL_DEFAULT)
+    group.add_argument(
+        "-u",
+        "--url",
+        default=URL_DEFAULT,
+        dest="url",
+        help="URL of the InfluxDB 2.x server, default: " + URL_DEFAULT,
+    )
     group.add_argument("-T", "--token", help="Token to access the bucket")
-    group.add_argument("-B",
-                       "--bucket",
-                       default=BUCKET_DEFAULT,
-                       help="Bucket name to use, default: " + BUCKET_DEFAULT)
+    group.add_argument(
+        "-B",
+        "--bucket",
+        default=BUCKET_DEFAULT,
+        help="Bucket name to use, default: " + BUCKET_DEFAULT,
+    )
     group.add_argument("-O", "--org", help="Organisation name")
-    group.add_argument("-k",
-                       "--skip-query",
-                       action="store_true",
-                       help="Skip querying for prior sample write point in bulk mode")
-    group.add_argument("-C",
-                       "--ca-cert",
-                       dest="ssl_ca_cert",
-                       help="Use specified CA cert to verify HTTPS server",
-                       metavar="FILENAME")
-    group.add_argument("-I",
-                       "--insecure",
-                       action="store_false",
-                       dest="verify_ssl",
-                       help="Disable certificate verification of HTTPS server (INSECURE!)")
+    group.add_argument(
+        "-k",
+        "--skip-query",
+        action="store_true",
+        help="Skip querying for prior sample write point in bulk mode",
+    )
+    group.add_argument(
+        "-C",
+        "--ca-cert",
+        dest="ssl_ca_cert",
+        help="Use specified CA cert to verify HTTPS server",
+        metavar="FILENAME",
+    )
+    group.add_argument(
+        "-I",
+        "--insecure",
+        action="store_false",
+        dest="verify_ssl",
+        help="Disable certificate verification of HTTPS server (INSECURE!)",
+    )
 
     env_map = (
         ("INFLUXDB_URL", "url"),
@@ -112,8 +125,9 @@ def parse_args():
         if val is not None:
             opts.icargs[key] = val
 
-    if (not opts.verify_ssl
-            or opts.ssl_ca_cert is not None) and not opts.url.lower().startswith("https:"):
+    if (
+        not opts.verify_ssl or opts.ssl_ca_cert is not None
+    ) and not opts.url.lower().startswith("https:"):
         parser.error("SSL options only apply to HTTPS URLs")
 
     return opts
@@ -122,25 +136,32 @@ def parse_args():
 def flush_points(opts, gstate):
     try:
         write_api = gstate.influx_client.write_api(
-            write_options=WriteOptions(batch_size=len(gstate.points),
-                                       flush_interval=10_000,
-                                       jitter_interval=2_000,
-                                       retry_interval=5_000,
-                                       max_retries=5,
-                                       max_retry_delay=30_000,
-                                       exponential_base=2))
+            write_options=WriteOptions(
+                batch_size=len(gstate.points),
+                flush_interval=10_000,
+                jitter_interval=2_000,
+                retry_interval=5_000,
+                max_retries=5,
+                max_retry_delay=30_000,
+                exponential_base=2,
+            )
+        )
         while len(gstate.points) > MAX_BATCH:
-            write_api.write(record=gstate.points[:MAX_BATCH],
-                            write_precision=WritePrecision.S,
-                            bucket=opts.bucket)
+            write_api.write(
+                record=gstate.points[:MAX_BATCH],
+                write_precision=WritePrecision.S,
+                bucket=opts.bucket,
+            )
             if opts.verbose:
                 print("Data points written: " + str(MAX_BATCH))
             del gstate.points[:MAX_BATCH]
 
         if gstate.points:
-            write_api.write(record=gstate.points,
-                            write_precision=WritePrecision.S,
-                            bucket=opts.bucket)
+            write_api.write(
+                record=gstate.points,
+                write_precision=WritePrecision.S,
+                bucket=opts.bucket,
+            )
             if opts.verbose:
                 print("Data points written: " + str(len(gstate.points)))
             gstate.points.clear()
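
A minimal sketch of the influxdb-client write path used above; the URL, token, and org values are placeholders:

    from influxdb_client import InfluxDBClient, WriteOptions, WritePrecision

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        write_api = client.write_api(write_options=WriteOptions(batch_size=500))
        write_api.write(
            bucket="starlinkstats",
            record=[{"measurement": "example", "fields": {"value": 1}, "time": 1668000000}],
            write_precision=WritePrecision.S,
        )
        write_api.close()  # flush buffered batches before the client closes
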
@@ -161,14 +182,18 @@ def flush_points(opts, gstate):
 
 def query_counter(opts, gstate, start, end):
     query_api = gstate.influx_client.query_api()
-    result = query_api.query('''
-        from(bucket: "{0}")
-            |> range(start: {1}, stop: {2})
-            |> filter(fn: (r) => r["_measurement"] == "{3}")
-            |> filter(fn: (r) => r["_field"] == "counter")
-            |> last()
-            |> yield(name: "last")
-        '''.format(opts.bucket, str(start), str(end), BULK_MEASUREMENT))
+    result = query_api.query(
+        """
+        from(bucket: "{0}")
+            |> range(start: {1}, stop: {2})
+            |> filter(fn: (r) => r["_measurement"] == "{3}")
+            |> filter(fn: (r) => r["_field"] == "counter")
+            |> last()
+            |> yield(name: "last")
+        """.format(
+            opts.bucket, str(start), str(end), BULK_MEASUREMENT
+        )
+    )
     if result:
         counter = result[0].records[0]["_value"]
         timestamp = result[0].records[0]["_time"].timestamp()
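
The Flux query re-wrapped above fetches the last recorded counter value; an equivalent standalone sketch with the same package (bucket and measurement names are illustrative):

    from influxdb_client import InfluxDBClient

    with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
        tables = client.query_api().query(
            """
            from(bucket: "starlinkstats")
                |> range(start: 0)
                |> filter(fn: (r) => r["_measurement"] == "example")
                |> filter(fn: (r) => r["_field"] == "counter")
                |> last()
            """
        )
        if tables:
            record = tables[0].records[0]
            counter, when = record["_value"], record["_time"]
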
@@ -180,16 +205,19 @@ def query_counter(opts, gstate, start, end):
 
 def sync_timebase(opts, gstate):
     try:
-        db_counter, db_timestamp = query_counter(opts, gstate, gstate.start_timestamp,
-                                                 gstate.timestamp)
+        db_counter, db_timestamp = query_counter(
+            opts, gstate, gstate.start_timestamp, gstate.timestamp
+        )
     except Exception as e:
         # could be temporary outage, so try again next time
-        dish_common.conn_error(opts, "Failed querying InfluxDB for prior count: %s", str(e))
+        dish_common.conn_error(
+            opts, "Failed querying InfluxDB for prior count: %s", str(e)
+        )
         return
     gstate.timebase_synced = True
 
     if db_counter and gstate.start_counter <= db_counter:
-        del gstate.deferred_points[:db_counter - gstate.start_counter]
+        del gstate.deferred_points[: db_counter - gstate.start_counter]
         if gstate.deferred_points:
             delta_timestamp = db_timestamp - (gstate.deferred_points[0]["time"] - 1)
             # to prevent +/- 1 second timestamp drift when the script restarts,
@@ -200,8 +228,12 @@ def sync_timebase(opts, gstate):
                     print("Exactly synced with database time base")
             elif -2 <= delta_timestamp <= 2:
                 if opts.verbose:
-                    print("Replacing with existing time base: {0} -> {1}".format(
-                        db_counter, datetime.fromtimestamp(db_timestamp, tz=timezone.utc)))
+                    print(
+                        "Replacing with existing time base: {0} -> {1}".format(
+                            db_counter,
+                            datetime.fromtimestamp(db_timestamp, tz=timezone.utc),
+                        )
+                    )
                 for point in gstate.deferred_points:
                     db_timestamp += 1
                     if point["time"] + delta_timestamp == db_timestamp:
@@ -213,7 +245,11 @@ def sync_timebase(opts, gstate):
                 gstate.timestamp = db_timestamp
             else:
                 if opts.verbose:
-                    print("Database time base out of sync by {0} seconds".format(delta_timestamp))
+                    print(
+                        "Database time base out of sync by {0} seconds".format(
+                            delta_timestamp
+                        )
+                    )
 
     gstate.points.extend(gstate.deferred_points)
     gstate.deferred_points.clear()
@@ -236,38 +272,42 @@ def loop_body(opts, gstate, shutdown=False):
         points = gstate.points if gstate.timebase_synced else gstate.deferred_points
         for i in range(count):
             timestamp += 1
-            points.append({
-                "measurement": BULK_MEASUREMENT,
-                "tags": {
-                    "id": gstate.dish_id
-                },
-                "time": timestamp,
-                "fields": {key: val[i] for key, val in bulk.items() if val[i] is not None},
-            })
+            points.append(
+                {
+                    "measurement": BULK_MEASUREMENT,
+                    "tags": {"id": gstate.dish_id},
+                    "time": timestamp,
+                    "fields": {
+                        key: val[i] for key, val in bulk.items() if val[i] is not None
+                    },
+                }
+            )
         if points:
             # save off counter value for script restart
             points[-1]["fields"]["counter"] = counter + count
 
-    rc, status_ts, hist_ts = dish_common.get_data(opts,
-                                                  gstate,
-                                                  cb_add_item,
-                                                  cb_add_sequence,
-                                                  add_bulk=cb_add_bulk,
-                                                  flush_history=shutdown)
+    rc, status_ts, hist_ts = dish_common.get_data(
+        opts,
+        gstate,
+        cb_add_item,
+        cb_add_sequence,
+        add_bulk=cb_add_bulk,
+        flush_history=shutdown,
+    )
     if rc:
         return rc
 
     for category, cat_fields in fields.items():
         if cat_fields:
             timestamp = status_ts if category == "status" else hist_ts
-            gstate.points.append({
-                "measurement": "spacex.starlink.user_terminal." + category,
-                "tags": {
-                    "id": gstate.dish_id
-                },
-                "time": timestamp,
-                "fields": cat_fields,
-            })
+            gstate.points.append(
+                {
+                    "measurement": "spacex.starlink.user_terminal." + category,
+                    "tags": {"id": gstate.dish_id},
+                    "time": timestamp,
+                    "fields": cat_fields,
+                }
+            )
 
     # This is here and not before the points being processed because if the
     # query previously failed, there will be points that were processed in
dish_grpc_mqtt.py
@@ -29,13 +29,14 @@ import time
 
 try:
     import ssl
+
     ssl_ok = True
 except ImportError:
     ssl_ok = False
 
 import paho.mqtt.publish
 
-import dish_common
+import starlink_grpc_tools.dish_common as dish_common
 
 HOST_DEFAULT = "localhost"
 
@@ -50,16 +51,23 @@ def handle_sigterm(signum, frame):
 
 
 def parse_args():
-    parser = dish_common.create_arg_parser(output_description="publish it to a MQTT broker",
-                                           bulk_history=False)
+    parser = dish_common.create_arg_parser(
+        output_description="publish it to a MQTT broker", bulk_history=False
+    )
 
     group = parser.add_argument_group(title="MQTT broker options")
-    group.add_argument("-n",
-                       "--hostname",
-                       default=HOST_DEFAULT,
-                       help="Hostname of MQTT broker, default: " + HOST_DEFAULT)
-    group.add_argument("-p", "--port", type=int, help="Port number to use on MQTT broker")
-    group.add_argument("-P", "--password", help="Set password for username/password authentication")
+    group.add_argument(
+        "-n",
+        "--hostname",
+        default=HOST_DEFAULT,
+        help="Hostname of MQTT broker, default: " + HOST_DEFAULT,
+    )
+    group.add_argument(
+        "-p", "--port", type=int, help="Port number to use on MQTT broker"
+    )
+    group.add_argument(
+        "-P", "--password", help="Set password for username/password authentication"
+    )
     group.add_argument("-U", "--username", help="Set username for authentication")
    group.add_argument("-J", "--json", action="store_true", help="Publish data as JSON")
     if ssl_ok:
@@ -67,24 +75,30 @@ def parse_args():
 
         def wrap_ca_arg(arg):
             return {"ca_certs": arg}
 
-        group.add_argument("-C",
-                           "--ca-cert",
-                           type=wrap_ca_arg,
-                           dest="tls",
-                           help="Enable SSL/TLS using specified CA cert to verify broker",
-                           metavar="FILENAME")
-        group.add_argument("-I",
-                           "--insecure",
-                           action="store_const",
-                           const={"cert_reqs": ssl.CERT_NONE},
-                           dest="tls",
-                           help="Enable SSL/TLS but disable certificate verification (INSECURE!)")
-        group.add_argument("-S",
-                           "--secure",
-                           action="store_const",
-                           const={},
-                           dest="tls",
-                           help="Enable SSL/TLS using default CA cert")
+        group.add_argument(
+            "-C",
+            "--ca-cert",
+            type=wrap_ca_arg,
+            dest="tls",
+            help="Enable SSL/TLS using specified CA cert to verify broker",
+            metavar="FILENAME",
+        )
+        group.add_argument(
+            "-I",
+            "--insecure",
+            action="store_const",
+            const={"cert_reqs": ssl.CERT_NONE},
+            dest="tls",
+            help="Enable SSL/TLS but disable certificate verification (INSECURE!)",
+        )
+        group.add_argument(
+            "-S",
+            "--secure",
+            action="store_const",
+            const={},
+            dest="tls",
+            help="Enable SSL/TLS using default CA cert",
+        )
     else:
         parser.epilog += "\nSSL support options not available due to missing ssl module"
 
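
The tls option dicts assembled above are passed straight through to paho-mqtt. A sketch of how such a dict is consumed (the hostname, port, and topic are placeholders, and this only runs against a reachable broker):

    import ssl

    import paho.mqtt.publish

    paho.mqtt.publish.multiple(
        [("starlink/dish_status/example/state", "CONNECTED", 0, False)],
        hostname="localhost",
        port=8883,
        tls={"cert_reqs": ssl.CERT_NONE},  # the -I form: TLS without verification
    )
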
@@ -135,7 +149,6 @@ def loop_body(opts, gstate):
     msgs = []
 
     if opts.json:
-
         data = {}
 
         def cb_add_item(key, val, category):
@@ -155,12 +168,24 @@ def loop_body(opts, gstate):
     else:
 
         def cb_add_item(key, val, category):
-            msgs.append(("starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id,
-                                                            key), val, 0, False))
+            msgs.append(
+                (
+                    "starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id, key),
+                    val,
+                    0,
+                    False,
+                )
+            )
 
         def cb_add_sequence(key, val, category, _):
-            msgs.append(("starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id, key),
-                         ",".join("" if x is None else str(x) for x in val), 0, False))
+            msgs.append(
+                (
+                    "starlink/dish_{0}/{1}/{2}".format(category, gstate.dish_id, key),
+                    ",".join("" if x is None else str(x) for x in val),
+                    0,
+                    False,
+                )
+            )
 
     rc = dish_common.get_data(opts, gstate, cb_add_item, cb_add_sequence)[0]
 
dish_grpc_prometheus.py
@@ -6,13 +6,14 @@ history data and makes it available via HTTP in the format Prometheus expects.
 """
 
 from http import HTTPStatus
-from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
+from http.server import BaseHTTPRequestHandler
+from http.server import ThreadingHTTPServer
 import logging
 import signal
 import sys
 import threading
 
-import dish_common
+import starlink_grpc_tools.dish_common as dish_common
 
 
 class Terminated(Exception):
@@ -128,14 +129,18 @@ class MetricValue:
     def __str__(self):
         label_str = ""
         if self.labels:
-            label_str = ("{" + str.join(",", [f'{v[0]}="{v[1]}"'
-                                              for v in self.labels.items()]) + "}")
+            label_str = (
+                "{"
+                + str.join(",", [f'{v[0]}="{v[1]}"' for v in self.labels.items()])
+                + "}"
+            )
         return f"{label_str} {self.value}"
 
 
 def parse_args():
-    parser = dish_common.create_arg_parser(output_description="Prometheus exporter",
-                                           bulk_history=False)
+    parser = dish_common.create_arg_parser(
+        output_description="Prometheus exporter", bulk_history=False
+    )
 
     group = parser.add_argument_group(title="HTTP server options")
     group.add_argument("--address", default="0.0.0.0", help="IP address to listen on")
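
MetricValue.__str__ above renders the Prometheus exposition format; a tiny sketch of the label-string construction with an invented metric name:

    labels = {"state": "CONNECTED"}
    label_str = "{" + ",".join(f'{k}="{v}"' for k, v in labels.items()) + "}"
    print(f"starlink_status_state{label_str} 1")
    # -> starlink_status_state{state="CONNECTED"} 1
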
@@ -155,8 +160,9 @@ def prometheus_export(opts, gstate):
         raise NotImplementedError("Did not expect sequence data")
 
     with gstate.lock:
-        rc, status_ts, hist_ts = dish_common.get_data(opts, gstate, data_add_item,
-                                                      data_add_sequencem)
+        rc, status_ts, hist_ts = dish_common.get_data(
+            opts, gstate, data_add_item, data_add_sequencem
+        )
 
     metrics = []
 
@@ -173,9 +179,11 @@ def prometheus_export(opts, gstate):
                 MetricValue(
                     value=int(raw_data["status_state"] == state_value),
                     labels={"state": state_value},
-                ) for state_value in STATE_VALUES
+                )
+                for state_value in STATE_VALUES
             ],
-        ))
+        )
+    )
     del raw_data["status_state"]
 
     info_metrics = ["status_id", "status_hardware_version", "status_software_version"]
@@ -191,12 +199,14 @@ def prometheus_export(opts, gstate):
             MetricValue(
                 value=1,
                 labels={
-                    x.replace("status_", ""): raw_data.pop(x) for x in info_metrics
+                    x.replace("status_", ""): raw_data.pop(x)
+                    for x in info_metrics
                     if x in raw_data
                 },
             )
         ],
-    ))
+        )
+    )
 
     for name, metric_info in METRICS_INFO.items():
         if name in raw_data:
@@ -206,7 +216,8 @@ def prometheus_export(opts, gstate):
                     timestamp=status_ts,
                     kind=metric_info.kind,
                     values=[MetricValue(value=float(raw_data.pop(name) or 0))],
-                ))
+                )
+            )
         else:
             metrics_not_found.append(name)
 
@@ -215,17 +226,22 @@ def prometheus_export(opts, gstate):
             name="starlink_exporter_unprocessed_metrics",
             timestamp=status_ts,
             values=[MetricValue(value=1, labels={"metric": name}) for name in raw_data],
-        ))
+        )
+    )
 
     metrics.append(
         Metric(
             name="starlink_exporter_missing_metrics",
             timestamp=status_ts,
-            values=[MetricValue(
-                value=1,
-                labels={"metric": name},
-            ) for name in metrics_not_found],
-        ))
+            values=[
+                MetricValue(
+                    value=1,
+                    labels={"metric": name},
+                )
+                for name in metrics_not_found
+            ],
+        )
+    )
 
     return str.join("\n", [str(metric) for metric in metrics])
dish_grpc_sqlite.py
@@ -43,8 +43,8 @@ import sqlite3
 import sys
 import time
 
-import dish_common
-import starlink_grpc
+import starlink_grpc_tools.dish_common as dish_common
+import starlink_grpc_tools.starlink_grpc as starlink_grpc
 
 SCHEMA_VERSION = 4
 
@@ -59,20 +59,26 @@ def handle_sigterm(signum, frame):
 
 
 def parse_args():
-    parser = dish_common.create_arg_parser(output_description="write it to a sqlite database")
+    parser = dish_common.create_arg_parser(
+        output_description="write it to a sqlite database"
+    )
 
     parser.add_argument("database", help="Database file to use")
 
     group = parser.add_argument_group(title="sqlite database options")
-    group.add_argument("-f",
-                       "--force",
-                       action="store_true",
-                       help="Force schema conversion, even if it results in downgrade; may "
-                       "result in discarded data")
-    group.add_argument("-k",
-                       "--skip-query",
-                       action="store_true",
-                       help="Skip querying for prior sample write point in history modes")
+    group.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        help="Force schema conversion, even if it results in downgrade; may "
+        "result in discarded data",
+    )
+    group.add_argument(
+        "-k",
+        "--skip-query",
+        action="store_true",
+        help="Skip querying for prior sample write point in history modes",
+    )
 
     opts = dish_common.run_arg_parser(parser, need_id=True)
 
@@ -86,14 +92,19 @@ def query_counter(opts, gstate, column, table):
     cur = gstate.sql_conn.cursor()
     cur.execute(
         'SELECT "time", "{0}" FROM "{1}" WHERE "time"<? AND "id"=? '
-        'ORDER BY "time" DESC LIMIT 1'.format(column, table), (now, gstate.dish_id))
+        'ORDER BY "time" DESC LIMIT 1'.format(column, table),
+        (now, gstate.dish_id),
+    )
     row = cur.fetchone()
     cur.close()
 
     if row and row[0] and row[1]:
         if opts.verbose:
-            print("Existing time base: {0} -> {1}".format(
-                row[1], datetime.fromtimestamp(row[0], tz=timezone.utc)))
+            print(
+                "Existing time base: {0} -> {1}".format(
+                    row[1], datetime.fromtimestamp(row[0], tz=timezone.utc)
+                )
+            )
         return row
     else:
         return 0, None
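
A runnable sketch of the parameterized lookup pattern in query_counter, against a throwaway in-memory database (the table and values are examples):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute('CREATE TABLE "history" ("time" INTEGER, "id" TEXT, "counter" INTEGER)')
    conn.execute('INSERT INTO "history" VALUES (?, ?, ?)', (1668000000, "dish1", 42))
    cur = conn.cursor()
    cur.execute(
        'SELECT "time", "counter" FROM "history" WHERE "time"<? AND "id"=? '
        'ORDER BY "time" DESC LIMIT 1',
        (1668000001, "dish1"),
    )
    print(cur.fetchone())  # -> (1668000000, 42)
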
@@ -108,7 +119,9 @@ def loop_body(opts, gstate, shutdown=False):
         tables[category][key] = val
 
     def cb_add_sequence(key, val, category, start):
-        tables[category][key] = ",".join(str(subv) if subv is not None else "" for subv in val)
+        tables[category][key] = ",".join(
+            str(subv) if subv is not None else "" for subv in val
+        )
 
     def cb_add_bulk(bulk, count, timestamp, counter):
         if len(hist_cols) == 2:
@@ -127,19 +140,26 @@ def loop_body(opts, gstate, shutdown=False):
     hist_ts = None
 
     if not shutdown:
-        rc, status_ts = dish_common.get_status_data(opts, gstate, cb_add_item, cb_add_sequence)
+        rc, status_ts = dish_common.get_status_data(
+            opts, gstate, cb_add_item, cb_add_sequence
+        )
 
     if opts.history_stats_mode and (not rc or opts.poll_loops > 1):
         if gstate.counter_stats is None and not opts.skip_query and opts.samples < 0:
-            _, gstate.counter_stats = query_counter(opts, gstate, "end_counter", "ping_stats")
-        hist_rc, hist_ts = dish_common.get_history_stats(opts, gstate, cb_add_item, cb_add_sequence,
-                                                         shutdown)
+            _, gstate.counter_stats = query_counter(
+                opts, gstate, "end_counter", "ping_stats"
+            )
+        hist_rc, hist_ts = dish_common.get_history_stats(
+            opts, gstate, cb_add_item, cb_add_sequence, shutdown
+        )
         if not rc:
             rc = hist_rc
 
     if not shutdown and opts.bulk_mode and not rc:
         if gstate.counter is None and not opts.skip_query and opts.bulk_samples < 0:
-            gstate.timestamp, gstate.counter = query_counter(opts, gstate, "counter", "history")
+            gstate.timestamp, gstate.counter = query_counter(
+                opts, gstate, "counter", "history"
+            )
         rc = dish_common.get_bulk_data(opts, gstate, cb_add_bulk)
 
     rows_written = 0
@@ -150,9 +170,10 @@ def loop_body(opts, gstate, shutdown=False):
             if fields:
                 timestamp = status_ts if category == "status" else hist_ts
                 sql = 'INSERT OR REPLACE INTO "{0}" ("time","id",{1}) VALUES ({2})'.format(
-                    category, ",".join('"' + x + '"' for x in fields),
-                    ",".join(repeat("?",
-                                    len(fields) + 2)))
+                    category,
+                    ",".join('"' + x + '"' for x in fields),
+                    ",".join(repeat("?", len(fields) + 2)),
+                )
                 values = [timestamp, gstate.dish_id]
                 values.extend(fields.values())
                 cur.execute(sql, values)
@@ -160,7 +181,9 @@ def loop_body(opts, gstate, shutdown=False):
 
         if hist_rows:
             sql = 'INSERT OR REPLACE INTO "history" ({0}) VALUES({1})'.format(
-                ",".join('"' + x + '"' for x in hist_cols), ",".join(repeat("?", len(hist_cols))))
+                ",".join('"' + x + '"' for x in hist_cols),
+                ",".join(repeat("?", len(hist_cols))),
+            )
             cur.executemany(sql, hist_rows)
             rows_written += len(hist_rows)
 
@@ -191,7 +214,9 @@ def ensure_schema(opts, conn, context):
             print("Initializing new database")
         create_tables(conn, context, "")
     elif version[0] > SCHEMA_VERSION and not opts.force:
-        logging.error("Cowardly refusing to downgrade from schema version %s", version[0])
+        logging.error(
+            "Cowardly refusing to downgrade from schema version %s", version[0]
+        )
         return 1
     else:
         print("Converting from schema version:", version[0])
@@ -208,10 +233,12 @@ def ensure_schema(opts, conn, context):
 
 def create_tables(conn, context, suffix):
     tables = {}
-    name_groups = (starlink_grpc.status_field_names(context=context) +
-                   (starlink_grpc.location_field_names(),))
-    type_groups = (starlink_grpc.status_field_types(context=context) +
-                   (starlink_grpc.location_field_types(),))
+    name_groups = starlink_grpc.status_field_names(context=context) + (
+        starlink_grpc.location_field_names(),
+    )
+    type_groups = starlink_grpc.status_field_types(context=context) + (
+        starlink_grpc.location_field_types(),
+    )
     tables["status"] = zip(name_groups, type_groups)
 
     name_groups = starlink_grpc.history_stats_field_names()
@@ -248,7 +275,8 @@ def create_tables(conn, context, suffix):
                 column_names.append(name_item)
         cur.execute('DROP TABLE IF EXISTS "{0}{1}"'.format(table, suffix))
         sql = 'CREATE TABLE "{0}{1}" ({2}, PRIMARY KEY("time","id"))'.format(
-            table, suffix, ", ".join(columns))
+            table, suffix, ", ".join(columns)
+        )
         cur.execute(sql)
         column_info[table] = column_names
     cur.close()
@@ -266,9 +294,13 @@ def convert_tables(conn, context):
         old_columns = set(x[0] for x in old_cur.description)
         new_columns = tuple(x for x in new_columns if x in old_columns)
         sql = 'INSERT OR REPLACE INTO "{0}_new" ({1}) VALUES ({2})'.format(
-            table, ",".join('"' + x + '"' for x in new_columns),
-            ",".join(repeat("?", len(new_columns))))
-        new_cur.executemany(sql, (tuple(row[col] for col in new_columns) for row in old_cur))
+            table,
+            ",".join('"' + x + '"' for x in new_columns),
+            ",".join(repeat("?", len(new_columns))),
+        )
+        new_cur.executemany(
+            sql, (tuple(row[col] for col in new_columns) for row in old_cur)
+        )
         new_cur.execute('DROP TABLE "{0}"'.format(table))
         new_cur.execute('ALTER TABLE "{0}_new" RENAME TO "{0}"'.format(table))
         old_cur.close()
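
convert_tables, reformatted above, migrates data with a copy-then-rename; a sketch of the same pattern on a toy schema (the real code derives its column lists from gRPC reflection):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        'CREATE TABLE "status" ("time" INTEGER, "id" TEXT);'
        'CREATE TABLE "status_new" ("time" INTEGER, "id" TEXT, "state" TEXT);'
        'INSERT INTO "status" VALUES (1668000000, "dish1");'
    )
    cur = conn.cursor()
    cur.executemany(
        'INSERT OR REPLACE INTO "status_new" ("time","id") VALUES (?,?)',
        conn.execute('SELECT "time","id" FROM "status"'),
    )
    cur.execute('DROP TABLE "status"')
    cur.execute('ALTER TABLE "status_new" RENAME TO "status"')
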
dish_grpc_text.py
@@ -18,15 +18,14 @@ import signal
 import sys
 import time
 
-import dish_common
-import starlink_grpc
+import starlink_grpc_tools.dish_common as dish_common
+import starlink_grpc_tools.starlink_grpc as starlink_grpc
 
 COUNTER_FIELD = "end_counter"
 VERBOSE_FIELD_MAP = {
     # status fields (the remainder are either self-explanatory or I don't
     # know with confidence what they mean)
     "alerts": "Alerts bit field",
-
     # ping_drop fields
     "samples": "Parsed samples",
     "end_counter": "Sample counter",
@@ -38,22 +37,18 @@ VERBOSE_FIELD_MAP = {
     "count_unscheduled": "Unscheduled",
     "total_unscheduled_ping_drop": "Unscheduled ping drop",
     "count_full_unscheduled_ping_drop": "Unscheduled drop == 1",
-
     # ping_run_length fields
     "init_run_fragment": "Initial drop run fragment",
     "final_run_fragment": "Final drop run fragment",
     "run_seconds": "Per-second drop runs",
     "run_minutes": "Per-minute drop runs",
-
     # ping_latency fields
     "mean_all_ping_latency": "Mean RTT, drop < 1",
     "deciles_all_ping_latency": "RTT deciles, drop < 1",
     "mean_full_ping_latency": "Mean RTT, drop == 0",
     "deciles_full_ping_latency": "RTT deciles, drop == 0",
     "stdev_full_ping_latency": "RTT standard deviation, drop == 0",
-
     # ping_loaded_latency is still experimental, so leave those unexplained
-
     # usage fields
     "download_usage": "Bytes downloaded",
     "upload_usage": "Bytes uploaded",
@@ -71,34 +66,52 @@ def handle_sigterm(signum, frame):
 
 def parse_args():
     parser = dish_common.create_arg_parser(
-        output_description="print it in text format; by default, will print in CSV format")
+        output_description="print it in text format; by default, will print in CSV format"
+    )
 
     group = parser.add_argument_group(title="CSV output options")
-    group.add_argument("-H",
-                       "--print-header",
-                       action="store_true",
-                       help="Print CSV header instead of parsing data")
-    group.add_argument("-O",
-                       "--out-file",
-                       default="-",
-                       help="Output file path; if set, can also be used to resume from prior "
-                       "history sample counter, default: write to standard output")
-    group.add_argument("-k",
-                       "--skip-query",
-                       action="store_true",
-                       help="Skip querying for prior sample write point in history modes")
+    group.add_argument(
+        "-H",
+        "--print-header",
+        action="store_true",
+        help="Print CSV header instead of parsing data",
+    )
+    group.add_argument(
+        "-O",
+        "--out-file",
+        default="-",
+        help="Output file path; if set, can also be used to resume from prior "
+        "history sample counter, default: write to standard output",
+    )
+    group.add_argument(
+        "-k",
+        "--skip-query",
+        action="store_true",
+        help="Skip querying for prior sample write point in history modes",
+    )
 
     opts = dish_common.run_arg_parser(parser)
 
-    if (opts.history_stats_mode or opts.status_mode) and opts.bulk_mode and not opts.verbose:
+    if (
+        (opts.history_stats_mode or opts.status_mode)
+        and opts.bulk_mode
+        and not opts.verbose
+    ):
         parser.error("bulk_history cannot be combined with other modes for CSV output")
 
     # Technically possible, but a pain to implement, so just disallow it. User
     # probably doesn't realize how weird it would be, anyway, given that stats
     # data reports at a different rate from status data in this case.
-    if opts.history_stats_mode and opts.status_mode and not opts.verbose and opts.poll_loops > 1:
-        parser.error("usage of --poll-loops with history stats modes cannot be mixed with status "
-                     "modes for CSV output")
+    if (
+        opts.history_stats_mode
+        and opts.status_mode
+        and not opts.verbose
+        and opts.poll_loops > 1
+    ):
+        parser.error(
+            "usage of --poll-loops with history stats modes cannot be mixed with status "
+            "modes for CSV output"
+        )
 
     opts.skip_query |= opts.no_counter | opts.verbose
     if opts.out_file == "-":
@@ -133,7 +146,9 @@ def print_header(opts, print_file):
     try:
         name_groups = starlink_grpc.status_field_names(context=context)
     except starlink_grpc.GrpcError as e:
-        dish_common.conn_error(opts, "Failure reflecting status field names: %s", str(e))
+        dish_common.conn_error(
+            opts, "Failure reflecting status field names: %s", str(e)
+        )
         return 1
     if "status" in opts.mode:
         header_add(name_groups[0])
@@ -196,8 +211,9 @@ def loop_body(opts, gstate, print_file, shutdown=False):
 
     def cb_data_add_item(name, val, category):
         if opts.verbose:
-            csv_data.append("{0:22} {1}".format(
-                VERBOSE_FIELD_MAP.get(name, name) + ":", xform(val)))
+            csv_data.append(
+                "{0:22} {1}".format(VERBOSE_FIELD_MAP.get(name, name) + ":", xform(val))
+            )
         else:
             # special case for get_status failure: this will be the lone item added
             if name == "state" and val == "DISH_UNREACHABLE":
@@ -207,21 +223,31 @@ def loop_body(opts, gstate, print_file, shutdown=False):
 
     def cb_data_add_sequence(name, val, category, start):
         if opts.verbose:
-            csv_data.append("{0:22} {1}".format(
-                VERBOSE_FIELD_MAP.get(name, name) + ":",
-                ", ".join(xform(subval) for subval in val)))
+            csv_data.append(
+                "{0:22} {1}".format(
+                    VERBOSE_FIELD_MAP.get(name, name) + ":",
+                    ", ".join(xform(subval) for subval in val),
+                )
+            )
         else:
             csv_data.extend(xform(subval) for subval in val)
 
     def cb_add_bulk(bulk, count, timestamp, counter):
         if opts.verbose:
-            print("Time range (UTC): {0} -> {1}".format(
-                datetime.utcfromtimestamp(timestamp).isoformat(),
-                datetime.utcfromtimestamp(timestamp + count).isoformat()),
-                  file=print_file)
+            print(
+                "Time range (UTC): {0} -> {1}".format(
+                    datetime.utcfromtimestamp(timestamp).isoformat(),
+                    datetime.utcfromtimestamp(timestamp + count).isoformat(),
+                ),
+                file=print_file,
+            )
             for key, val in bulk.items():
-                print("{0:22} {1}".format(key + ":", ", ".join(xform(subval) for subval in val)),
-                      file=print_file)
+                print(
+                    "{0:22} {1}".format(
+                        key + ":", ", ".join(xform(subval) for subval in val)
+                    ),
+                    file=print_file,
+                )
             if opts.loop_interval > 0.0:
                 print(file=print_file)
         else:
@@ -231,12 +257,14 @@ def loop_body(opts, gstate, print_file, shutdown=False):
                 fields.extend([xform(val[i]) for val in bulk.values()])
                 print(",".join(fields), file=print_file)
 
-    rc, status_ts, hist_ts = dish_common.get_data(opts,
-                                                  gstate,
-                                                  cb_data_add_item,
-                                                  cb_data_add_sequence,
-                                                  add_bulk=cb_add_bulk,
-                                                  flush_history=shutdown)
+    rc, status_ts, hist_ts = dish_common.get_data(
+        opts,
+        gstate,
+        cb_data_add_item,
+        cb_data_add_sequence,
+        add_bulk=cb_add_bulk,
+        flush_history=shutdown,
+    )
 
     if opts.verbose:
         if csv_data:
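
Both of the text-output scripts drive collection through caller-supplied callbacks like the cb_data_add_* functions above; a self-contained sketch of that pattern with invented field names:

    # The caller owns the output format; the collector just invokes callbacks.
    csv_data = []

    def add_item(name, val, category):
        csv_data.append(str(val))

    def add_sequence(name, val, category, start):
        csv_data.extend("" if v is None else str(v) for v in val)

    add_item("state", "CONNECTED", "status")
    add_sequence("run_seconds", [0, None, 3], "ping_run_length", 0)
    print(",".join(csv_data))  # -> CONNECTED,0,,3
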
dish_json_text.py
@@ -18,12 +18,16 @@ import re
 import sys
 import time
 
-import starlink_json
+import starlink_grpc_tools.starlink_json as starlink_json
 
 BRACKETS_RE = re.compile(r"([^[]*)(\[((\d+),|)(\d*)\]|)$")
 SAMPLES_DEFAULT = 3600
 HISTORY_STATS_MODES = [
-    "ping_drop", "ping_run_length", "ping_latency", "ping_loaded_latency", "usage"
+    "ping_drop",
+    "ping_run_length",
+    "ping_latency",
+    "ping_loaded_latency",
+    "usage",
 ]
 VERBOSE_FIELD_MAP = {
     # ping_drop fields
@@ -37,22 +41,18 @@ VERBOSE_FIELD_MAP = {
     "count_unscheduled": "Unscheduled",
     "total_unscheduled_ping_drop": "Unscheduled ping drop",
     "count_full_unscheduled_ping_drop": "Unscheduled drop == 1",
-
     # ping_run_length fields
     "init_run_fragment": "Initial drop run fragment",
     "final_run_fragment": "Final drop run fragment",
     "run_seconds": "Per-second drop runs",
     "run_minutes": "Per-minute drop runs",
-
     # ping_latency fields
     "mean_all_ping_latency": "Mean RTT, drop < 1",
     "deciles_all_ping_latency": "RTT deciles, drop < 1",
     "mean_full_ping_latency": "Mean RTT, drop == 0",
     "deciles_full_ping_latency": "RTT deciles, drop == 0",
     "stdev_full_ping_latency": "RTT standard deviation, drop == 0",
-
     # ping_loaded_latency is still experimental, so leave those unexplained
-
     # usage fields
     "download_usage": "Bytes downloaded",
     "upload_usage": "Bytes uploaded",
@@ -63,42 +63,55 @@ def parse_args():
     parser = argparse.ArgumentParser(
         description="Collect status and/or history data from a Starlink user terminal and "
         "print it to standard output in text format; by default, will print in CSV format",
-        add_help=False)
+        add_help=False,
+    )
 
     group = parser.add_argument_group(title="General options")
-    group.add_argument("-f", "--filename", default="-", help="The file to parse, default: stdin")
+    group.add_argument(
+        "-f", "--filename", default="-", help="The file to parse, default: stdin"
+    )
     group.add_argument("-h", "--help", action="help", help="Be helpful")
-    group.add_argument("-t",
-                       "--timestamp",
-                       help="UTC time history data was pulled, as YYYY-MM-DD_HH:MM:SS or as "
-                       "seconds since Unix epoch, default: current time")
+    group.add_argument(
+        "-t",
+        "--timestamp",
+        help="UTC time history data was pulled, as YYYY-MM-DD_HH:MM:SS or as "
+        "seconds since Unix epoch, default: current time",
+    )
     group.add_argument("-v", "--verbose", action="store_true", help="Be verbose")
 
     group = parser.add_argument_group(title="History mode options")
-    group.add_argument("-a",
-                       "--all-samples",
-                       action="store_const",
-                       const=-1,
-                       dest="samples",
-                       help="Parse all valid samples")
-    group.add_argument("-s",
-                       "--samples",
-                       type=int,
-                       help="Number of data samples to parse, default: all in bulk mode, "
-                       "else " + str(SAMPLES_DEFAULT))
+    group.add_argument(
+        "-a",
+        "--all-samples",
+        action="store_const",
+        const=-1,
+        dest="samples",
+        help="Parse all valid samples",
+    )
+    group.add_argument(
+        "-s",
+        "--samples",
+        type=int,
+        help="Number of data samples to parse, default: all in bulk mode, "
+        "else " + str(SAMPLES_DEFAULT),
+    )
 
     group = parser.add_argument_group(title="CSV output options")
-    group.add_argument("-H",
-                       "--print-header",
-                       action="store_true",
-                       help="Print CSV header instead of parsing data")
+    group.add_argument(
+        "-H",
+        "--print-header",
+        action="store_true",
+        help="Print CSV header instead of parsing data",
+    )
 
     all_modes = HISTORY_STATS_MODES + ["bulk_history"]
-    parser.add_argument("mode",
-                        nargs="+",
-                        choices=all_modes,
-                        help="The data group to record, one or more of: " + ", ".join(all_modes),
-                        metavar="mode")
+    parser.add_argument(
+        "mode",
+        nargs="+",
+        choices=all_modes,
+        help="The data group to record, one or more of: " + ", ".join(all_modes),
+        metavar="mode",
+    )
 
     opts = parser.parse_args()
 
@@ -120,11 +133,15 @@ def parse_args():
         except ValueError:
             try:
                 opts.history_time = int(
-                    datetime.strptime(opts.timestamp, "%Y-%m-%d_%H:%M:%S").timestamp())
+                    datetime.strptime(opts.timestamp, "%Y-%m-%d_%H:%M:%S").timestamp()
+                )
             except ValueError:
                 parser.error("Could not parse timestamp")
         if opts.verbose:
-            print("Using timestamp", datetime.fromtimestamp(opts.history_time, tz=timezone.utc))
+            print(
+                "Using timestamp",
+                datetime.fromtimestamp(opts.history_time, tz=timezone.utc),
+            )
 
     return opts
 
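
The timestamp handling re-wrapped above accepts either epoch seconds or a YYYY-MM-DD_HH:MM:SS string; a sketch of that two-stage parse (the sample value is illustrative, and .timestamp() interprets the naive datetime in local time, as the script does):

    from datetime import datetime

    def parse_history_time(text):
        try:
            return int(text)
        except ValueError:
            return int(datetime.strptime(text, "%Y-%m-%d_%H:%M:%S").timestamp())

    print(parse_history_time("2022-11-09_14:30:00"))
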
@ -177,7 +194,9 @@ def get_data(opts, add_item, add_sequence, add_bulk):
|
||||||
|
|
||||||
if opts.history_stats_mode:
|
if opts.history_stats_mode:
|
||||||
try:
|
try:
|
||||||
groups = starlink_json.history_stats(opts.filename, opts.samples, verbose=opts.verbose)
|
groups = starlink_json.history_stats(
|
||||||
|
opts.filename, opts.samples, verbose=opts.verbose
|
||||||
|
)
|
||||||
except starlink_json.JsonError as e:
|
except starlink_json.JsonError as e:
|
||||||
logging.error("Failure getting history stats: %s", str(e))
|
logging.error("Failure getting history stats: %s", str(e))
|
||||||
return 1
|
return 1
|
@@ -197,17 +216,20 @@ def get_data(opts, add_item, add_sequence, add_bulk):
     if opts.bulk_mode and add_bulk:
         timestamp = int(time.time()) if opts.history_time is None else opts.history_time
         try:
-            general, bulk = starlink_json.history_bulk_data(opts.filename,
-                                                            opts.samples,
-                                                            verbose=opts.verbose)
+            general, bulk = starlink_json.history_bulk_data(
+                opts.filename, opts.samples, verbose=opts.verbose
+            )
         except starlink_json.JsonError as e:
             logging.error("Failure getting bulk history: %s", str(e))
             return 1
         parsed_samples = general["samples"]
         new_counter = general["end_counter"]
         if opts.verbose:
-            print("Establishing time base: {0} -> {1}".format(
-                new_counter, datetime.fromtimestamp(timestamp, tz=timezone.utc)))
+            print(
+                "Establishing time base: {0} -> {1}".format(
+                    new_counter, datetime.fromtimestamp(timestamp, tz=timezone.utc)
+                )
+            )
         timestamp -= parsed_samples
 
         add_bulk(bulk, parsed_samples, timestamp, new_counter - parsed_samples)
@@ -219,12 +241,16 @@ def loop_body(opts):
     if opts.verbose:
         csv_data = []
     else:
-        history_time = int(time.time()) if opts.history_time is None else opts.history_time
+        history_time = (
+            int(time.time()) if opts.history_time is None else opts.history_time
+        )
         csv_data = [datetime.utcfromtimestamp(history_time).isoformat()]
 
     def cb_data_add_item(name, val):
         if opts.verbose:
-            csv_data.append("{0:22} {1}".format(VERBOSE_FIELD_MAP.get(name, name) + ":", val))
+            csv_data.append(
+                "{0:22} {1}".format(VERBOSE_FIELD_MAP.get(name, name) + ":", val)
+            )
         else:
             # special case for get_status failure: this will be the lone item added
             if name == "state" and val == "DISH_UNREACHABLE":
@@ -234,23 +260,36 @@ def loop_body(opts):
 
     def cb_data_add_sequence(name, val):
         if opts.verbose:
-            csv_data.append("{0:22} {1}".format(
-                VERBOSE_FIELD_MAP.get(name, name) + ":", ", ".join(str(subval) for subval in val)))
+            csv_data.append(
+                "{0:22} {1}".format(
+                    VERBOSE_FIELD_MAP.get(name, name) + ":",
+                    ", ".join(str(subval) for subval in val),
+                )
+            )
         else:
             csv_data.extend(str(subval) for subval in val)
 
     def cb_add_bulk(bulk, count, timestamp, counter):
         if opts.verbose:
-            print("Time range (UTC): {0} -> {1}".format(
-                datetime.utcfromtimestamp(timestamp).isoformat(),
-                datetime.utcfromtimestamp(timestamp + count).isoformat()))
+            print(
+                "Time range (UTC): {0} -> {1}".format(
+                    datetime.utcfromtimestamp(timestamp).isoformat(),
+                    datetime.utcfromtimestamp(timestamp + count).isoformat(),
+                )
+            )
             for key, val in bulk.items():
-                print("{0:22} {1}".format(key + ":", ", ".join(str(subval) for subval in val)))
+                print(
+                    "{0:22} {1}".format(
+                        key + ":", ", ".join(str(subval) for subval in val)
+                    )
+                )
         else:
             for i in range(count):
                 timestamp += 1
                 fields = [datetime.utcfromtimestamp(timestamp).isoformat()]
-                fields.extend(["" if val[i] is None else str(val[i]) for val in bulk.values()])
+                fields.extend(
+                    ["" if val[i] is None else str(val[i]) for val in bulk.values()]
+                )
                 print(",".join(fields))
 
     rc = get_data(opts, cb_data_add_item, cb_data_add_sequence, cb_add_bulk)
@@ -9,11 +9,12 @@ import argparse
 from datetime import datetime
 import logging
 import os
-import png
 import sys
 import time
 
-import starlink_grpc
+import png
+
+import starlink_grpc_tools.starlink_grpc as starlink_grpc
 
 DEFAULT_OBSTRUCTED_COLOR = "FFFF0000"
 DEFAULT_UNOBSTRUCTED_COLOR = "FFFFFFFF"
@@ -39,18 +40,28 @@ def loop_body(opts, context):
 
             if point >= 0.0:
                 if opts.greyscale:
-                    yield round(point * opts.unobstructed_color_g +
-                                (1.0-point) * opts.obstructed_color_g)
+                    yield round(
+                        point * opts.unobstructed_color_g
+                        + (1.0 - point) * opts.obstructed_color_g
+                    )
                 else:
-                    yield round(point * opts.unobstructed_color_r +
-                                (1.0-point) * opts.obstructed_color_r)
-                    yield round(point * opts.unobstructed_color_g +
-                                (1.0-point) * opts.obstructed_color_g)
-                    yield round(point * opts.unobstructed_color_b +
-                                (1.0-point) * opts.obstructed_color_b)
+                    yield round(
+                        point * opts.unobstructed_color_r
+                        + (1.0 - point) * opts.obstructed_color_r
+                    )
+                    yield round(
+                        point * opts.unobstructed_color_g
+                        + (1.0 - point) * opts.obstructed_color_g
+                    )
+                    yield round(
+                        point * opts.unobstructed_color_b
+                        + (1.0 - point) * opts.obstructed_color_b
+                    )
                 if not opts.no_alpha:
-                    yield round(point * opts.unobstructed_color_a +
-                                (1.0-point) * opts.obstructed_color_a)
+                    yield round(
+                        point * opts.unobstructed_color_a
+                        + (1.0 - point) * opts.obstructed_color_a
+                    )
             else:
                 if opts.greyscale:
                     yield opts.no_data_color_g
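The arithmetic in this hunk is a straight linear interpolation per channel: `point` is the normalized map value, with 1.0 fully unobstructed and 0.0 fully obstructed. A standalone sketch of the same blend (the channel values here are invented for illustration):

# Sketch of the per-channel blend used in the hunk above.
def blend(point: float, unobstructed: int, obstructed: int) -> int:
    # point = 1.0 yields the unobstructed color, 0.0 the obstructed color.
    return round(point * unobstructed + (1.0 - point) * obstructed)

print(blend(0.25, 255, 0))  # a 25%-unobstructed red channel -> 64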
@@ -67,17 +78,20 @@ def loop_body(opts, context):
     else:
         now = int(time.time())
         filename = opts.filename.replace("%u", str(now))
-        filename = filename.replace("%d",
-                                    datetime.utcfromtimestamp(now).strftime("%Y_%m_%d_%H_%M_%S"))
+        filename = filename.replace(
+            "%d", datetime.utcfromtimestamp(now).strftime("%Y_%m_%d_%H_%M_%S")
+        )
         filename = filename.replace("%s", str(opts.sequence))
         out_file = open(filename, "wb")
     if not snr_data or not snr_data[0]:
         logging.error("Invalid SNR map data: Zero-length")
         return 1
-    writer = png.Writer(len(snr_data[0]),
-                        len(snr_data),
-                        alpha=(not opts.no_alpha),
-                        greyscale=opts.greyscale)
+    writer = png.Writer(
+        len(snr_data[0]),
+        len(snr_data),
+        alpha=(not opts.no_alpha),
+        greyscale=opts.greyscale,
+    )
     writer.write(out_file, (bytes(pixel_bytes(row)) for row in snr_data))
     out_file.close()
 
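For reference, this is the pypng calling pattern the hunk above settles on: image dimensions and mode flags go to the `png.Writer` constructor, and an iterable of pixel rows goes to `write()`. A minimal self-contained sketch, with an invented 2x2 RGBA image:

# Minimal pypng usage sketch mirroring the call above (2x2 image is made up).
import png

rows = [
    (255, 0, 0, 255, 0, 255, 0, 255),    # red, green (RGBA values interleaved)
    (0, 0, 255, 255, 255, 255, 255, 0),  # blue, transparent white
]
writer = png.Writer(2, 2, alpha=True, greyscale=False)
with open("tiny.png", "wb") as out_file:
    writer.write(out_file, rows)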
@@ -88,62 +102,89 @@ def loop_body(opts, context):
 def parse_args():
     parser = argparse.ArgumentParser(
         description="Collect directional obstruction map data from a Starlink user terminal and "
-        "emit it as a PNG image")
+        "emit it as a PNG image"
+    )
     parser.add_argument(
         "filename",
         help="The image file to write, or - to write to stdout; may be a template with the "
         "following to be filled in per loop iteration: %%s for sequence number, %%d for UTC date "
-        "and time, %%u for seconds since Unix epoch.")
+        "and time, %%u for seconds since Unix epoch.",
+    )
     parser.add_argument(
         "-o",
         "--obstructed-color",
-        help="Color of obstructed areas, in RGB, ARGB, L, or AL hex notation, default: " +
-        DEFAULT_OBSTRUCTED_COLOR + " or " + DEFAULT_OBSTRUCTED_GREYSCALE)
+        help="Color of obstructed areas, in RGB, ARGB, L, or AL hex notation, default: "
+        + DEFAULT_OBSTRUCTED_COLOR
+        + " or "
+        + DEFAULT_OBSTRUCTED_GREYSCALE,
+    )
     parser.add_argument(
         "-u",
         "--unobstructed-color",
-        help="Color of unobstructed areas, in RGB, ARGB, L, or AL hex notation, default: " +
-        DEFAULT_UNOBSTRUCTED_COLOR + " or " + DEFAULT_UNOBSTRUCTED_GREYSCALE)
+        help="Color of unobstructed areas, in RGB, ARGB, L, or AL hex notation, default: "
+        + DEFAULT_UNOBSTRUCTED_COLOR
+        + " or "
+        + DEFAULT_UNOBSTRUCTED_GREYSCALE,
+    )
     parser.add_argument(
         "-n",
         "--no-data-color",
-        help="Color of areas with no data, in RGB, ARGB, L, or AL hex notation, default: " +
-        DEFAULT_NO_DATA_COLOR + " or " + DEFAULT_NO_DATA_GREYSCALE)
+        help="Color of areas with no data, in RGB, ARGB, L, or AL hex notation, default: "
+        + DEFAULT_NO_DATA_COLOR
+        + " or "
+        + DEFAULT_NO_DATA_GREYSCALE,
+    )
     parser.add_argument(
         "-g",
         "--greyscale",
         action="store_true",
         help="Emit a greyscale image instead of the default full color image; greyscale images "
-        "use L or AL hex notation for the color options")
+        "use L or AL hex notation for the color options",
+    )
     parser.add_argument(
         "-z",
         "--no-alpha",
         action="store_true",
         help="Emit an image without alpha (transparency) channel instead of the default that "
-        "includes alpha channel")
-    parser.add_argument("-e",
-                        "--target",
-                        help="host:port of dish to query, default is the standard IP address "
-                        "and port (192.168.100.1:9200)")
-    parser.add_argument("-t",
-                        "--loop-interval",
-                        type=float,
-                        default=float(LOOP_TIME_DEFAULT),
-                        help="Loop interval in seconds or 0 for no loop, default: " +
-                        str(LOOP_TIME_DEFAULT))
-    parser.add_argument("-s",
-                        "--sequence",
-                        type=int,
-                        default=1,
-                        help="Starting sequence number for templatized filenames, default: 1")
+        "includes alpha channel",
+    )
+    parser.add_argument(
+        "-e",
+        "--target",
+        help="host:port of dish to query, default is the standard IP address "
+        "and port (192.168.100.1:9200)",
+    )
+    parser.add_argument(
+        "-t",
+        "--loop-interval",
+        type=float,
+        default=float(LOOP_TIME_DEFAULT),
+        help="Loop interval in seconds or 0 for no loop, default: "
+        + str(LOOP_TIME_DEFAULT),
+    )
+    parser.add_argument(
+        "-s",
+        "--sequence",
+        type=int,
+        default=1,
+        help="Starting sequence number for templatized filenames, default: 1",
+    )
     opts = parser.parse_args()
 
     if opts.obstructed_color is None:
-        opts.obstructed_color = DEFAULT_OBSTRUCTED_GREYSCALE if opts.greyscale else DEFAULT_OBSTRUCTED_COLOR
+        opts.obstructed_color = (
+            DEFAULT_OBSTRUCTED_GREYSCALE if opts.greyscale else DEFAULT_OBSTRUCTED_COLOR
+        )
     if opts.unobstructed_color is None:
-        opts.unobstructed_color = DEFAULT_UNOBSTRUCTED_GREYSCALE if opts.greyscale else DEFAULT_UNOBSTRUCTED_COLOR
+        opts.unobstructed_color = (
+            DEFAULT_UNOBSTRUCTED_GREYSCALE
+            if opts.greyscale
+            else DEFAULT_UNOBSTRUCTED_COLOR
+        )
    if opts.no_data_color is None:
-        opts.no_data_color = DEFAULT_NO_DATA_GREYSCALE if opts.greyscale else DEFAULT_NO_DATA_COLOR
+        opts.no_data_color = (
+            DEFAULT_NO_DATA_GREYSCALE if opts.greyscale else DEFAULT_NO_DATA_COLOR
+        )
 
     for option in ("obstructed_color", "unobstructed_color", "no_data_color"):
         try:
@@ -9,8 +9,10 @@ try:
     from spacex.api.device import device_pb2
     from spacex.api.device import device_pb2_grpc
 except ModuleNotFoundError:
-    print("This script requires the generated gRPC protocol modules. See README file for details.",
-          file=sys.stderr)
+    print(
+        "This script requires the generated gRPC protocol modules. See README file for details.",
+        file=sys.stderr,
+    )
     sys.exit(1)
 
 # Note that if you remove the 'with' clause here, you need to separately
@@ -39,32 +39,43 @@ RETRY_DELAY_DEFAULT = 0
 def parse_args():
     parser = argparse.ArgumentParser(
         description="Poll a gRPC reflection server and record a serialized "
-        "FileDescriptorSet (protoset) of the reflected information")
+        "FileDescriptorSet (protoset) of the reflected information"
+    )
 
-    parser.add_argument("outdir",
-                        nargs="?",
-                        metavar="OUTDIR",
-                        help="Directory in which to write protoset files")
-    parser.add_argument("-g",
-                        "--target",
-                        default=TARGET_DEFAULT,
-                        help="host:port of device to query, default: " + TARGET_DEFAULT)
-    parser.add_argument("-n",
-                        "--print-only",
-                        action="store_true",
-                        help="Print the protoset filename instead of writing the data")
-    parser.add_argument("-r",
-                        "--retry-delay",
-                        type=float,
-                        default=float(RETRY_DELAY_DEFAULT),
-                        help="Time in seconds to wait before retrying after network "
-                        "error or 0 for no retry, default: " + str(RETRY_DELAY_DEFAULT))
-    parser.add_argument("-t",
-                        "--loop-interval",
-                        type=float,
-                        default=float(LOOP_TIME_DEFAULT),
-                        help="Loop interval in seconds or 0 for no loop, default: " +
-                        str(LOOP_TIME_DEFAULT))
+    parser.add_argument(
+        "outdir",
+        nargs="?",
+        metavar="OUTDIR",
+        help="Directory in which to write protoset files",
+    )
+    parser.add_argument(
+        "-g",
+        "--target",
+        default=TARGET_DEFAULT,
+        help="host:port of device to query, default: " + TARGET_DEFAULT,
+    )
+    parser.add_argument(
+        "-n",
+        "--print-only",
+        action="store_true",
+        help="Print the protoset filename instead of writing the data",
+    )
+    parser.add_argument(
+        "-r",
+        "--retry-delay",
+        type=float,
+        default=float(RETRY_DELAY_DEFAULT),
+        help="Time in seconds to wait before retrying after network "
+        "error or 0 for no retry, default: " + str(RETRY_DELAY_DEFAULT),
+    )
+    parser.add_argument(
+        "-t",
+        "--loop-interval",
+        type=float,
+        default=float(LOOP_TIME_DEFAULT),
+        help="Loop interval in seconds or 0 for no loop, default: "
+        + str(LOOP_TIME_DEFAULT),
+    )
     parser.add_argument("-v", "--verbose", action="store_true", help="Be verbose")
 
     opts = parser.parse_args()
@@ -14,7 +14,7 @@ from datetime import datetime
 from datetime import timezone
 import time
 
-import starlink_grpc
+import starlink_grpc_tools.starlink_grpc as starlink_grpc
 
 INITIAL_SAMPLES = 20
 LOOP_SLEEP_TIME = 4
@@ -34,7 +34,9 @@ def run_loop(context):
 
             # On the other hand, `starlink_grpc.history_bulk_data` will always
             # return 2 dicts, because that's all the data there is.
-            general, bulk = starlink_grpc.history_bulk_data(samples, start=counter, context=context)
+            general, bulk = starlink_grpc.history_bulk_data(
+                samples, start=counter, context=context
+            )
         except starlink_grpc.GrpcError:
             # Dish rebooting maybe, or LAN connectivity error. Just ignore it
             # and hope it goes away.
@@ -44,14 +46,17 @@ def run_loop(context):
         # be replaced with something more useful.
 
         # This computes a trigger detecting any packet loss (ping drop):
-        #triggered = any(x > 0 for x in bulk["pop_ping_drop_rate"])
+        # triggered = any(x > 0 for x in bulk["pop_ping_drop_rate"])
         # This computes a trigger detecting samples marked as obstructed:
-        #triggered = any(bulk["obstructed"])
+        # triggered = any(bulk["obstructed"])
         # This computes a trigger detecting samples not marked as scheduled:
         triggered = not all(bulk["scheduled"])
         if triggered or prev_triggered:
-            print("Triggered" if triggered else "Continued", "at:",
-                  datetime.now(tz=timezone.utc))
+            print(
+                "Triggered" if triggered else "Continued",
+                "at:",
+                datetime.now(tz=timezone.utc),
+            )
             print("status:", status)
             print("history:", bulk)
             if not triggered:
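The counter bookkeeping this example script relies on is worth seeing end to end. A rough sketch of the polling loop using only the public helpers visible in this diff; the interval, initial sample count, and variable names are arbitrary choices, not taken verbatim from the file:

# Sketch of counter-based incremental polling with history_bulk_data.
import time

import starlink_grpc_tools.starlink_grpc as starlink_grpc

context = starlink_grpc.ChannelContext()
counter = None  # None means "no previous poll yet"
samples = 20    # fixed window for the first poll

while True:
    try:
        # start=counter returns only samples newer than the last poll;
        # general["end_counter"] is the start value for the next poll.
        general, bulk = starlink_grpc.history_bulk_data(
            samples, start=counter, context=context
        )
    except starlink_grpc.GrpcError:
        time.sleep(4)
        continue
    counter = general["end_counter"]
    samples = -1  # after the first poll, take everything new
    time.sleep(4)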
@@ -144,6 +144,12 @@ their nature, but the field names are pretty self-explanatory.
     *alerts*.
 : **alert_power_supply_thermal_throttle** : Alert corresponding with bit 9 (bit
     mask 512) in *alerts*.
+: **alert_is_power_save_idle** : Alert corresponding with bit 10 (bit mask
+    1024) in *alerts*.
+: **alert_moving_while_not_mobile** : Alert corresponding with bit 11 (bit mask
+    2048) in *alerts*.
+: **alert_moving_fast_while_not_aviation** : Alert corresponding with bit 12
+    (bit mask 4096) in *alerts*.
 
 Location data
 -------------
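Since the new docstring entries identify each alert only by bit position, the mapping to mask values can be made concrete. A small sketch mirroring the bit-field construction used later in this diff (field number N maps to mask 1 << (N - 1)); the helper name is invented:

# Decode the packed "alerts" bit field documented above.
ALERT_BITS = {
    "alert_power_supply_thermal_throttle": 9,   # mask 512
    "alert_is_power_save_idle": 10,             # mask 1024
    "alert_moving_while_not_mobile": 11,        # mask 2048
    "alert_moving_fast_while_not_aviation": 12, # mask 4096
}

def decode_alerts(alerts: int) -> dict:
    return {name: bool(alerts & (1 << (bit - 1))) for name, bit in ALERT_BITS.items()}

print(decode_alerts(1024))  # alert_is_power_save_idle -> True, all others False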
@@ -364,13 +370,21 @@ period.
 from itertools import chain
 import math
 import statistics
-from typing import Dict, Iterable, List, Optional, Sequence, Tuple, get_type_hints
-from typing_extensions import TypedDict, get_args
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import get_type_hints
 
 import grpc
+from typing_extensions import TypedDict
+from typing_extensions import get_args
 
 try:
     from yagrc import importer
+
     importer.add_lazy_packages(["spacex.api.device"])
     imports_pending = True
 except (ImportError, AttributeError):
@@ -384,11 +398,16 @@ from spacex.api.device import dish_pb2
 # prevent hang if the connection goes dead without closing.
 REQUEST_TIMEOUT = 10
 
-HISTORY_FIELDS = ("pop_ping_drop_rate", "pop_ping_latency_ms", "downlink_throughput_bps",
-                  "uplink_throughput_bps")
+HISTORY_FIELDS = (
+    "pop_ping_drop_rate",
+    "pop_ping_latency_ms",
+    "downlink_throughput_bps",
+    "uplink_throughput_bps",
+)
 
 StatusDict = TypedDict(
-    "StatusDict", {
+    "StatusDict",
+    {
         "id": str,
         "hardware_version": str,
         "software_version": str,
@@ -409,30 +428,40 @@ StatusDict = TypedDict(
         "direction_azimuth": float,
         "direction_elevation": float,
         "is_snr_above_noise_floor": bool,
-    })
+    },
+)
 
 ObstructionDict = TypedDict(
-    "ObstructionDict", {
+    "ObstructionDict",
+    {
         "wedges_fraction_obstructed[]": Sequence[Optional[float]],
         "raw_wedges_fraction_obstructed[]": Sequence[Optional[float]],
         "valid_s": float,
-    })
+    },
+)
 
 AlertDict = Dict[str, bool]
 
-LocationDict = TypedDict("LocationDict", {
-    "latitude": Optional[float],
-    "longitude": Optional[float],
-    "altitude": Optional[float],
-})
+LocationDict = TypedDict(
+    "LocationDict",
+    {
+        "latitude": Optional[float],
+        "longitude": Optional[float],
+        "altitude": Optional[float],
+    },
+)
 
-HistGeneralDict = TypedDict("HistGeneralDict", {
-    "samples": int,
-    "end_counter": int,
-})
+HistGeneralDict = TypedDict(
+    "HistGeneralDict",
+    {
+        "samples": int,
+        "end_counter": int,
+    },
+)
 
 HistBulkDict = TypedDict(
-    "HistBulkDict", {
+    "HistBulkDict",
+    {
         "pop_ping_drop_rate": Sequence[float],
         "pop_ping_latency_ms": Sequence[Optional[float]],
         "downlink_throughput_bps": Sequence[float],
@@ -440,10 +469,12 @@ HistBulkDict = TypedDict(
         "snr": Sequence[Optional[float]],
         "scheduled": Sequence[Optional[bool]],
         "obstructed": Sequence[Optional[bool]],
-    })
+    },
+)
 
 PingDropDict = TypedDict(
-    "PingDropDict", {
+    "PingDropDict",
+    {
         "total_ping_drop": float,
         "count_full_ping_drop": int,
         "count_obstructed": int,
@@ -452,37 +483,47 @@ PingDropDict = TypedDict(
         "count_unscheduled": int,
         "total_unscheduled_ping_drop": float,
         "count_full_unscheduled_ping_drop": int,
-    })
+    },
+)
 
 PingDropRlDict = TypedDict(
-    "PingDropRlDict", {
+    "PingDropRlDict",
+    {
         "init_run_fragment": int,
         "final_run_fragment": int,
         "run_seconds[1,]": Sequence[int],
         "run_minutes[1,]": Sequence[int],
-    })
+    },
+)
 
 PingLatencyDict = TypedDict(
-    "PingLatencyDict", {
+    "PingLatencyDict",
+    {
         "mean_all_ping_latency": float,
         "deciles_all_ping_latency[]": Sequence[float],
         "mean_full_ping_latency": float,
         "deciles_full_ping_latency[]": Sequence[float],
         "stdev_full_ping_latency": Optional[float],
-    })
+    },
+)
 
 LoadedLatencyDict = TypedDict(
-    "LoadedLatencyDict", {
+    "LoadedLatencyDict",
+    {
         "load_bucket_samples[]": Sequence[int],
         "load_bucket_min_latency[]": Sequence[Optional[float]],
         "load_bucket_median_latency[]": Sequence[Optional[float]],
         "load_bucket_max_latency[]": Sequence[Optional[float]],
-    })
+    },
+)
 
-UsageDict = TypedDict("UsageDict", {
-    "download_usage": int,
-    "upload_usage": int,
-})
+UsageDict = TypedDict(
+    "UsageDict",
+    {
+        "download_usage": int,
+        "upload_usage": int,
+    },
+)
 
 # For legacy reasons, there is a slight difference between the field names
 # returned in the actual data vs the *_field_names functions. This is a map of
@@ -528,6 +569,7 @@ def resolve_imports(channel: grpc.Channel):
 
 class GrpcError(Exception):
     """Provides error info when something went wrong with a gRPC call."""
+
     def __init__(self, e, *args, **kwargs):
         # grpc.RpcError is too verbose to print in whole, but it may also be
         # a Call object, and that class has some minimally useful info.
@@ -535,6 +577,8 @@ class GrpcError(Exception):
             msg = e.details()
         elif isinstance(e, grpc.RpcError):
             msg = "Unknown communication or service error"
+        elif isinstance(e, (AttributeError, IndexError, TypeError, ValueError)):
+            msg = "Protocol error"
         else:
             msg = str(e)
         super().__init__(msg, *args, **kwargs)
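With this change, protocol-level mismatches surface through the same exception type callers already handle. A caller-side sketch using only this module's public API; the printed fields are keys of the status dict defined earlier in the diff:

# Sketch of typical caller-side handling of GrpcError.
import starlink_grpc_tools.starlink_grpc as starlink_grpc

context = starlink_grpc.ChannelContext()  # default target 192.168.100.1:9200
try:
    status, obstructions, alerts = starlink_grpc.status_data(context=context)
    print(status["state"], "alert bits:", status["alerts"])
except starlink_grpc.GrpcError as e:
    # Covers RPC failures and, after this change, protocol mismatches
    # (AttributeError/IndexError/TypeError/ValueError) as "Protocol error".
    print("Failed getting status:", str(e))
finally:
    context.close()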
@@ -552,6 +596,7 @@ class ChannelContext:
     `close()` should be called on the object when it is no longer
     in use.
     """
+
     def __init__(self, target: Optional[str] = None) -> None:
         self.channel = None
         self.target = "192.168.100.1:9200" if target is None else target
@@ -569,7 +614,9 @@ class ChannelContext:
         self.channel = None
 
 
-def call_with_channel(function, *args, context: Optional[ChannelContext] = None, **kwargs):
+def call_with_channel(
+    function, *args, context: Optional[ChannelContext] = None, **kwargs
+):
     """Call a function with a channel object.
 
     Args:
@@ -617,8 +664,11 @@ def status_field_names(context: Optional[ChannelContext] = None):
     except grpc.RpcError as e:
         raise GrpcError(e) from e
     alert_names = []
-    for field in dish_pb2.DishAlerts.DESCRIPTOR.fields:
-        alert_names.append("alert_" + field.name)
+    try:
+        for field in dish_pb2.DishAlerts.DESCRIPTOR.fields:
+            alert_names.append("alert_" + field.name)
+    except AttributeError:
+        pass
 
     return _field_names(StatusDict), _field_names(ObstructionDict), alert_names
 
@@ -646,8 +696,16 @@ def status_field_types(context: Optional[ChannelContext] = None):
         call_with_channel(resolve_imports, context=context)
     except grpc.RpcError as e:
         raise GrpcError(e) from e
-    return (_field_types(StatusDict), _field_types(ObstructionDict),
-            [bool] * len(dish_pb2.DishAlerts.DESCRIPTOR.fields))
+    num_alerts = 0
+    try:
+        num_alerts = len(dish_pb2.DishAlerts.DESCRIPTOR.fields)
+    except AttributeError:
+        pass
+    return (
+        _field_types(StatusDict),
+        _field_types(ObstructionDict),
+        [bool] * num_alerts,
+    )
 
 
 def get_status(context: Optional[ChannelContext] = None):
@@ -661,12 +719,18 @@ def get_status(context: Optional[ChannelContext] = None):
 
     Raises:
         grpc.RpcError: Communication or service error.
+        AttributeError, ValueError: Protocol error. Either the target is not a
+            Starlink user terminal or the grpc protocol has changed in a way
+            this module cannot handle.
     """
 
     def grpc_call(channel):
         if imports_pending:
             resolve_imports(channel)
         stub = device_pb2_grpc.DeviceStub(channel)
-        response = stub.Handle(device_pb2.Request(get_status={}), timeout=REQUEST_TIMEOUT)
+        response = stub.Handle(
+            device_pb2.Request(get_status={}), timeout=REQUEST_TIMEOUT
+        )
         return response.dish_get_status
 
     return call_with_channel(grpc_call, context=context)
@@ -689,12 +753,13 @@ def get_id(context: Optional[ChannelContext] = None) -> str:
     try:
         status = get_status(context)
         return status.device_info.id
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         raise GrpcError(e) from e
 
 
 def status_data(
-    context: Optional[ChannelContext] = None) -> Tuple[StatusDict, ObstructionDict, AlertDict]:
+    context: Optional[ChannelContext] = None,
+) -> Tuple[StatusDict, ObstructionDict, AlertDict]:
     """Fetch current status data.
 
     Args:
@@ -711,63 +776,100 @@ def status_data(
     """
     try:
         status = get_status(context)
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         raise GrpcError(e) from e
 
-    if status.HasField("outage"):
-        if status.outage.cause == dish_pb2.DishOutage.Cause.NO_SCHEDULE:
-            # Special case translate this to equivalent old name
-            state = "SEARCHING"
-        else:
-            state = dish_pb2.DishOutage.Cause.Name(status.outage.cause)
-    else:
-        state = "CONNECTED"
+    try:
+        if status.HasField("outage"):
+            if status.outage.cause == dish_pb2.DishOutage.Cause.NO_SCHEDULE:
+                # Special case translate this to equivalent old name
+                state = "SEARCHING"
+            else:
+                try:
+                    state = dish_pb2.DishOutage.Cause.Name(status.outage.cause)
+                except ValueError:
+                    # Unlikely, but possible if dish is running newer firmware
+                    # than protocol data pulled via reflection
+                    state = str(status.outage.cause)
+        else:
+            state = "CONNECTED"
+    except (AttributeError, ValueError):
+        state = "UNKNOWN"
 
     # More alerts may be added in future, so in addition to listing them
     # individually, provide a bit field based on field numbers of the
     # DishAlerts message.
     alerts = {}
     alert_bits = 0
-    for field in status.alerts.DESCRIPTOR.fields:
-        value = getattr(status.alerts, field.name)
-        alerts["alert_" + field.name] = value
-        if field.number < 65:
-            alert_bits |= (1 if value else 0) << (field.number - 1)
+    try:
+        for field in status.alerts.DESCRIPTOR.fields:
+            value = getattr(status.alerts, field.name, False)
+            alerts["alert_" + field.name] = value
+            if field.number < 65:
+                alert_bits |= (1 if value else 0) << (field.number - 1)
+    except AttributeError:
+        pass
 
-    if (status.obstruction_stats.avg_prolonged_obstruction_duration_s > 0.0
-            and not math.isnan(status.obstruction_stats.avg_prolonged_obstruction_interval_s)):
-        obstruction_duration = status.obstruction_stats.avg_prolonged_obstruction_duration_s
-        obstruction_interval = status.obstruction_stats.avg_prolonged_obstruction_interval_s
-    else:
-        obstruction_duration = None
-        obstruction_interval = None
+    obstruction_duration = None
+    obstruction_interval = None
+    obstruction_stats = getattr(status, "obstruction_stats", None)
+    if obstruction_stats is not None:
+        try:
+            if (
+                obstruction_stats.avg_prolonged_obstruction_duration_s > 0.0
+                and not math.isnan(
+                    obstruction_stats.avg_prolonged_obstruction_interval_s
+                )
+            ):
+                obstruction_duration = (
+                    obstruction_stats.avg_prolonged_obstruction_duration_s
+                )
+                obstruction_interval = (
+                    obstruction_stats.avg_prolonged_obstruction_interval_s
+                )
+        except AttributeError:
+            pass
 
-    return {
-        "id": status.device_info.id,
-        "hardware_version": status.device_info.hardware_version,
-        "software_version": status.device_info.software_version,
-        "state": state,
-        "uptime": status.device_state.uptime_s,
-        "snr": None,  # obsoleted in grpc service
-        "seconds_to_first_nonempty_slot": status.seconds_to_first_nonempty_slot,
-        "pop_ping_drop_rate": status.pop_ping_drop_rate,
-        "downlink_throughput_bps": status.downlink_throughput_bps,
-        "uplink_throughput_bps": status.uplink_throughput_bps,
-        "pop_ping_latency_ms": status.pop_ping_latency_ms,
-        "alerts": alert_bits,
-        "fraction_obstructed": status.obstruction_stats.fraction_obstructed,
-        "currently_obstructed": status.obstruction_stats.currently_obstructed,
-        "seconds_obstructed": None,  # obsoleted in grpc service
-        "obstruction_duration": obstruction_duration,
-        "obstruction_interval": obstruction_interval,
-        "direction_azimuth": status.boresight_azimuth_deg,
-        "direction_elevation": status.boresight_elevation_deg,
-        "is_snr_above_noise_floor": status.is_snr_above_noise_floor,
-    }, {
-        "wedges_fraction_obstructed[]": [None] * 12,  # obsoleted in grpc service
-        "raw_wedges_fraction_obstructed[]": [None] * 12,  # obsoleted in grpc service
-        "valid_s": status.obstruction_stats.valid_s,
-    }, alerts
+    device_info = getattr(status, "device_info", None)
+    return (
+        {
+            "id": getattr(device_info, "id", None),
+            "hardware_version": getattr(device_info, "hardware_version", None),
+            "software_version": getattr(device_info, "software_version", None),
+            "state": state,
+            "uptime": getattr(getattr(status, "device_state", None), "uptime_s", None),
+            "snr": None,  # obsoleted in grpc service
+            "seconds_to_first_nonempty_slot": getattr(
+                status, "seconds_to_first_nonempty_slot", None
+            ),
+            "pop_ping_drop_rate": getattr(status, "pop_ping_drop_rate", None),
+            "downlink_throughput_bps": getattr(status, "downlink_throughput_bps", None),
+            "uplink_throughput_bps": getattr(status, "uplink_throughput_bps", None),
+            "pop_ping_latency_ms": getattr(status, "pop_ping_latency_ms", None),
+            "alerts": alert_bits,
+            "fraction_obstructed": getattr(
+                obstruction_stats, "fraction_obstructed", None
+            ),
+            "currently_obstructed": getattr(
+                obstruction_stats, "currently_obstructed", None
+            ),
+            "seconds_obstructed": None,  # obsoleted in grpc service
+            "obstruction_duration": obstruction_duration,
+            "obstruction_interval": obstruction_interval,
+            "direction_azimuth": getattr(status, "boresight_azimuth_deg", None),
+            "direction_elevation": getattr(status, "boresight_elevation_deg", None),
+            "is_snr_above_noise_floor": getattr(
+                status, "is_snr_above_noise_floor", None
+            ),
+        },
+        {
+            "wedges_fraction_obstructed[]": [None] * 12,  # obsoleted in grpc service
+            "raw_wedges_fraction_obstructed[]": [None]
+            * 12,  # obsoleted in grpc service
+            "valid_s": getattr(obstruction_stats, "valid_s", None),
+        },
+        alerts,
+    )
 
 
 def location_field_names():
@@ -801,12 +903,18 @@ def get_location(context: Optional[ChannelContext] = None):
 
     Raises:
         grpc.RpcError: Communication or service error.
+        AttributeError, ValueError: Protocol error. Either the target is not a
+            Starlink user terminal or the grpc protocol has changed in a way
+            this module cannot handle.
     """
 
     def grpc_call(channel):
         if imports_pending:
             resolve_imports(channel)
         stub = device_pb2_grpc.DeviceStub(channel)
-        response = stub.Handle(device_pb2.Request(get_location={}), timeout=REQUEST_TIMEOUT)
+        response = stub.Handle(
+            device_pb2.Request(get_location={}), timeout=REQUEST_TIMEOUT
+        )
         return response.get_location
 
     return call_with_channel(grpc_call, context=context)
@@ -829,7 +937,7 @@ def location_data(context: Optional[ChannelContext] = None) -> LocationDict:
     """
     try:
         location = get_location(context)
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         if isinstance(e, grpc.Call) and e.code() is grpc.StatusCode.PERMISSION_DENIED:
             return {
                 "latitude": None,
@@ -838,11 +946,17 @@ def location_data(context: Optional[ChannelContext] = None) -> LocationDict:
             }
         raise GrpcError(e) from e
 
-    return {
-        "latitude": location.lla.lat,
-        "longitude": location.lla.lon,
-        "altitude": location.lla.alt,
-    }
+    try:
+        return {
+            "latitude": location.lla.lat,
+            "longitude": location.lla.lon,
+            "altitude": getattr(location.lla, "alt", None),
+        }
+    except AttributeError as e:
+        # Allow None for altitude, but since all None values has special
+        # meaning for this function, any other protocol change is flagged as
+        # an error.
+        raise GrpcError(e) from e
 
 
 def history_bulk_field_names():
@@ -892,8 +1006,14 @@ def history_stats_field_names():
         additional data groups, so it not recommended for the caller to
         assume exactly 6 elements.
     """
-    return (_field_names(HistGeneralDict), _field_names(PingDropDict), _field_names(PingDropRlDict),
-            _field_names(PingLatencyDict), _field_names(LoadedLatencyDict), _field_names(UsageDict))
+    return (
+        _field_names(HistGeneralDict),
+        _field_names(PingDropDict),
+        _field_names(PingDropRlDict),
+        _field_names(PingLatencyDict),
+        _field_names(LoadedLatencyDict),
+        _field_names(UsageDict),
+    )
 
 
 def history_stats_field_types():
@@ -912,8 +1032,14 @@ def history_stats_field_types():
         additional data groups, so it not recommended for the caller to
         assume exactly 6 elements.
     """
-    return (_field_types(HistGeneralDict), _field_types(PingDropDict), _field_types(PingDropRlDict),
-            _field_types(PingLatencyDict), _field_types(LoadedLatencyDict), _field_types(UsageDict))
+    return (
+        _field_types(HistGeneralDict),
+        _field_types(PingDropDict),
+        _field_types(PingDropRlDict),
+        _field_types(PingLatencyDict),
+        _field_types(LoadedLatencyDict),
+        _field_types(UsageDict),
+    )
 
 
 def get_history(context: Optional[ChannelContext] = None):
@@ -927,23 +1053,32 @@ def get_history(context: Optional[ChannelContext] = None):
 
     Raises:
         grpc.RpcError: Communication or service error.
+        AttributeError, ValueError: Protocol error. Either the target is not a
+            Starlink user terminal or the grpc protocol has changed in a way
+            this module cannot handle.
     """
 
     def grpc_call(channel: grpc.Channel):
         if imports_pending:
             resolve_imports(channel)
         stub = device_pb2_grpc.DeviceStub(channel)
-        response = stub.Handle(device_pb2.Request(get_history={}), timeout=REQUEST_TIMEOUT)
+        response = stub.Handle(
+            device_pb2.Request(get_history={}), timeout=REQUEST_TIMEOUT
+        )
        return response.dish_get_history
 
     return call_with_channel(grpc_call, context=context)
 
 
-def _compute_sample_range(history,
-                          parse_samples: int,
-                          start: Optional[int] = None,
-                          verbose: bool = False):
-    current = int(history.current)
-    samples = len(history.pop_ping_drop_rate)
+def _compute_sample_range(
+    history, parse_samples: int, start: Optional[int] = None, verbose: bool = False
+):
+    try:
+        current = int(history.current)
+        samples = len(history.pop_ping_drop_rate)
+    except (AttributeError, TypeError):
+        # Without current and pop_ping_drop_rate, history is unusable.
+        return range(0), 0, None
 
     if verbose:
         print("current counter: " + str(current))
@@ -971,7 +1106,7 @@ def _compute_sample_range(
 
     # Not a ring buffer is simple case.
     if hasattr(history, "unwrapped"):
-        return range(samples - (current-start), samples), current - start, current
+        return range(samples - (current - start), samples), current - start, current
 
     # This is ring buffer offset, so both index to oldest data sample and
    # index to next data sample after the newest one.
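The comment above is the crux of the ring-buffer handling: a single offset is simultaneously the index of the oldest sample and the index one past the newest. An illustration of that arithmetic with made-up numbers, not taken from the dish:

# Illustrative sketch of unwrapping the ring buffer described above.
buffer_size = 900
current = 12345                 # total samples ever recorded (invented)
n = 100                         # how many of the newest samples we want
offset = current % buffer_size  # oldest sample AND one past the newest

# Indices of the newest n samples, oldest first, wrapping at the buffer end:
indices = [(offset - n + i) % buffer_size for i in range(n)]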
@@ -989,11 +1124,13 @@ def _compute_sample_range(
     return sample_range, current - start, current
 
 
-def concatenate_history(history1,
-                        history2,
-                        samples1: int = -1,
-                        start1: Optional[int] = None,
-                        verbose: bool = False):
+def concatenate_history(
+    history1,
+    history2,
+    samples1: int = -1,
+    start1: Optional[int] = None,
+    verbose: bool = False,
+):
     """Append the sample-dependent fields of one history object to another.
 
     Note:
@@ -1018,8 +1155,14 @@ def concatenate_history(
         An object with the unwrapped history data and the same attribute
         fields as a grpc history object.
     """
-    size2 = len(history2.pop_ping_drop_rate)
-    new_samples = history2.current - history1.current
+    try:
+        size2 = len(history2.pop_ping_drop_rate)
+        new_samples = history2.current - history1.current
+    except (AttributeError, TypeError):
+        # Something is wrong. Probably both history objects are bad, so no
+        # point in trying to combine them.
+        return history1
 
     if new_samples < 0:
         if verbose:
             print("Dish reboot detected. Appending anyway.")
@@ -1029,34 +1172,54 @@ def concatenate_history(
     # but this layer of the code tries not to make that sort of logging
     # policy decision, so honor requested verbosity.
     if verbose:
-        print("WARNING: Appending discontiguous samples. Polling interval probably too short.")
+        print(
+            "WARNING: Appending discontiguous samples. Polling interval probably too short."
+        )
         new_samples = size2
 
     unwrapped = UnwrappedHistory()
     for field in HISTORY_FIELDS:
-        setattr(unwrapped, field, [])
+        if hasattr(history1, field) and hasattr(history2, field):
+            setattr(unwrapped, field, [])
     unwrapped.unwrapped = True
 
-    sample_range, ignore1, ignore2 = _compute_sample_range(  # pylint: disable=unused-variable
-        history1, samples1, start=start1)
+    (
+        sample_range,
+        ignore1,
+        ignore2,
+    ) = _compute_sample_range(  # pylint: disable=unused-variable
+        history1, samples1, start=start1
+    )
     for i in sample_range:
         for field in HISTORY_FIELDS:
-            getattr(unwrapped, field).append(getattr(history1, field)[i])
+            if hasattr(unwrapped, field):
+                try:
+                    getattr(unwrapped, field).append(getattr(history1, field)[i])
+                except (IndexError, TypeError):
+                    pass
 
-    sample_range, ignore1, ignore2 = _compute_sample_range(history2, new_samples)  # pylint: disable=unused-variable
+    sample_range, ignore1, ignore2 = _compute_sample_range(
+        history2, new_samples
+    )  # pylint: disable=unused-variable
     for i in sample_range:
         for field in HISTORY_FIELDS:
-            getattr(unwrapped, field).append(getattr(history2, field)[i])
+            if hasattr(unwrapped, field):
+                try:
+                    getattr(unwrapped, field).append(getattr(history2, field)[i])
+                except (IndexError, TypeError):
+                    pass
 
     unwrapped.current = history2.current
     return unwrapped
 
 
-def history_bulk_data(parse_samples: int,
-                      start: Optional[int] = None,
-                      verbose: bool = False,
-                      context: Optional[ChannelContext] = None,
-                      history=None) -> Tuple[HistGeneralDict, HistBulkDict]:
+def history_bulk_data(
+    parse_samples: int,
+    start: Optional[int] = None,
+    verbose: bool = False,
+    context: Optional[ChannelContext] = None,
+    history=None,
+) -> Tuple[HistGeneralDict, HistBulkDict]:
     """Fetch history data for a range of samples.
 
     Args:
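A sketch of how `concatenate_history` and the `history=` parameter compose from the caller's side; the polling interval and sample counts are arbitrary, and error handling is omitted for brevity:

# Sketch: accumulate history across polls, then parse the combined object.
import time

import starlink_grpc_tools.starlink_grpc as starlink_grpc

context = starlink_grpc.ChannelContext()
accumulated = starlink_grpc.get_history(context=context)
time.sleep(60)  # arbitrary polling interval
latest = starlink_grpc.get_history(context=context)

# Append the newer samples onto the older object...
accumulated = starlink_grpc.concatenate_history(accumulated, latest)
# ...and hand the combined object to the parser via history=, with -1
# meaning "parse all available samples".
general, bulk = starlink_grpc.history_bulk_data(-1, history=accumulated)
print(general["samples"], "samples ending at counter", general["end_counter"])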
@@ -1096,13 +1259,12 @@ def history_bulk_data(
     if history is None:
         try:
             history = get_history(context)
-        except grpc.RpcError as e:
+        except (AttributeError, ValueError, grpc.RpcError) as e:
             raise GrpcError(e) from e
 
-    sample_range, parsed_samples, current = _compute_sample_range(history,
-                                                                  parse_samples,
-                                                                  start=start,
-                                                                  verbose=verbose)
+    sample_range, parsed_samples, current = _compute_sample_range(
+        history, parse_samples, start=start, verbose=verbose
+    )
 
     pop_ping_drop_rate = []
     pop_ping_latency_ms = []
@@ -1110,11 +1272,30 @@ def history_bulk_data(
     uplink_throughput_bps = []
 
     for i in sample_range:
+        # pop_ping_drop_rate is checked in _compute_sample_range
         pop_ping_drop_rate.append(history.pop_ping_drop_rate[i])
-        pop_ping_latency_ms.append(
-            history.pop_ping_latency_ms[i] if history.pop_ping_drop_rate[i] < 1 else None)
-        downlink_throughput_bps.append(history.downlink_throughput_bps[i])
-        uplink_throughput_bps.append(history.uplink_throughput_bps[i])
+
+        latency = None
+        try:
+            if history.pop_ping_drop_rate[i] < 1:
+                latency = history.pop_ping_latency_ms[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
+        pop_ping_latency_ms.append(latency)
+
+        downlink = None
+        try:
+            downlink = history.downlink_throughput_bps[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
+        downlink_throughput_bps.append(downlink)
+
+        uplink = None
+        try:
+            uplink = history.uplink_throughput_bps[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
+        uplink_throughput_bps.append(uplink)
 
     return {
         "samples": parsed_samples,
@@ -1130,10 +1311,9 @@ def history_bulk_data(
     }
 
 
-def history_ping_stats(parse_samples: int,
-                       verbose: bool = False,
-                       context: Optional[ChannelContext] = None
-                      ) -> Tuple[HistGeneralDict, PingDropDict, PingDropRlDict]:
+def history_ping_stats(
+    parse_samples: int, verbose: bool = False, context: Optional[ChannelContext] = None
+) -> Tuple[HistGeneralDict, PingDropDict, PingDropRlDict]:
     """Deprecated. Use history_stats instead."""
     return history_stats(parse_samples, verbose=verbose, context=context)[0:3]
 
@@ -1143,9 +1323,15 @@ def history_stats(
     start: Optional[int] = None,
     verbose: bool = False,
     context: Optional[ChannelContext] = None,
-    history=None
-) -> Tuple[HistGeneralDict, PingDropDict, PingDropRlDict, PingLatencyDict, LoadedLatencyDict,
-           UsageDict]:
+    history=None,
+) -> Tuple[
+    HistGeneralDict,
+    PingDropDict,
+    PingDropRlDict,
+    PingLatencyDict,
+    LoadedLatencyDict,
+    UsageDict,
+]:
     """Fetch, parse, and compute ping and usage stats.
 
     Note:
@@ -1181,13 +1367,12 @@ def history_stats(
     if history is None:
         try:
             history = get_history(context)
-        except grpc.RpcError as e:
+        except (AttributeError, ValueError, grpc.RpcError) as e:
             raise GrpcError(e) from e
 
-    sample_range, parsed_samples, current = _compute_sample_range(history,
-                                                                  parse_samples,
-                                                                  start=start,
-                                                                  verbose=verbose)
+    sample_range, parsed_samples, current = _compute_sample_range(
+        history, parse_samples, start=start, verbose=verbose
+    )
 
     tot = 0.0
     count_full_drop = 0
@@ -1224,23 +1409,36 @@ def history_stats(
                 if run_length <= 60:
                     second_runs[run_length - 1] += run_length
                 else:
-                    minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
+                    minute_runs[min((run_length - 1) // 60 - 1, 59)] += run_length
                 run_length = 0
             elif init_run_length is None:
                 init_run_length = 0
         tot += d

-        down = history.downlink_throughput_bps[i]
+        down = 0.0
+        try:
+            down = history.downlink_throughput_bps[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
         usage_down += down

-        up = history.uplink_throughput_bps[i]
+        up = 0.0
+        try:
+            up = history.uplink_throughput_bps[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
         usage_up += up

-        rtt = history.pop_ping_latency_ms[i]
+        rtt = 0.0
+        try:
+            rtt = history.pop_ping_latency_ms[i]
+        except (AttributeError, IndexError, TypeError):
+            pass
         # note that "full" here means the opposite of ping drop full
         if d == 0.0:
             rtt_full.append(rtt)
             if down + up > 500000:
-                rtt_buckets[min(14, int(math.log2((down+up) / 500000)))].append(rtt)
+                rtt_buckets[min(14, int(math.log2((down + up) / 500000)))].append(rtt)
             else:
                 rtt_buckets[0].append(rtt)
         if d < 1.0:
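The bucket index above maps combined throughput to one of 15 load buckets: bucket 0 covers up to 500 kbps, and each further bucket covers one doubling, capped at bucket 14. A self-contained check of that arithmetic:

import math

def load_bucket(down_bps: float, up_bps: float) -> int:
    # bucket 0: combined load <= 500 kbps; then one bucket per doubling,
    # capped at bucket 14 (equivalent to the expression in the diff above)
    total = down_bps + up_bps
    if total <= 500000:
        return 0
    return min(14, int(math.log2(total / 500000)))

print(load_bucket(100000, 50000))   # 0: 150 kbps is under the base load
print(load_bucket(800000, 300000))  # 1: 1.1 Mbps is ~2.2x the base load
print(load_bucket(5e8, 5e8))        # 10: 1 Gbps is ~2000x the base load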
@@ -1256,7 +1454,7 @@ def history_stats(

     def weighted_mean_and_quantiles(data, n):
         if not data:
-            return None, [None] * (n+1)
+            return None, [None] * (n + 1)
         total_weight = sum(x[1] for x in data)
         result = []
         items = iter(data)
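weighted_mean_and_quantiles, a helper nested inside history_stats, reduces (value, weight) pairs to a weighted mean plus n+1 quantile cut points. A simplified self-contained sketch of the idea, not the exact algorithm from this diff:

def weighted_mean_and_quantiles(data, n):
    # data: (value, weight) pairs sorted by value; returns the weighted mean
    # and n+1 quantile cut points (simplified linear scan per quantile)
    if not data:
        return None, [None] * (n + 1)
    total = sum(w for _, w in data)
    mean = sum(v * w for v, w in data) / total
    quantiles = []
    for k in range(n + 1):
        target = total * k / n
        acc = 0.0
        for v, w in data:
            acc += w
            if acc >= target:
                quantiles.append(v)
                break
    return mean, quantiles

data = sorted([(30.0, 1.0), (40.0, 2.0), (100.0, 1.0)])
mean, deciles = weighted_mean_and_quantiles(data, 10)
print(round(mean, 1), deciles[0], deciles[-1])  # 52.5 30.0 100.0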
@@ -1295,12 +1493,16 @@ def history_stats(
     rtt_all.sort(key=lambda x: x[0])
     wmean_all, wdeciles_all = weighted_mean_and_quantiles(rtt_all, 10)
     rtt_full.sort()
-    mean_full, deciles_full = weighted_mean_and_quantiles(tuple((x, 1.0) for x in rtt_full), 10)
+    mean_full, deciles_full = weighted_mean_and_quantiles(
+        tuple((x, 1.0) for x in rtt_full), 10
+    )

-    return {
-        "samples": parsed_samples,
-        "end_counter": current,
-    }, {
-        "total_ping_drop": tot,
-        "count_full_ping_drop": count_full_drop,
-        "count_obstructed": count_obstruct,
+    return (
+        {
+            "samples": parsed_samples,
+            "end_counter": current,
+        },
+        {
+            "total_ping_drop": tot,
+            "count_full_ping_drop": count_full_drop,
+            "count_obstructed": count_obstruct,
@@ -1309,26 +1511,33 @@ def history_stats(
-        "count_unscheduled": count_unsched,
-        "total_unscheduled_ping_drop": total_unsched_drop,
-        "count_full_unscheduled_ping_drop": count_full_unsched,
-    }, {
-        "init_run_fragment": init_run_length,
-        "final_run_fragment": run_length,
-        "run_seconds[1,]": second_runs,
-        "run_minutes[1,]": minute_runs,
-    }, {
-        "mean_all_ping_latency": wmean_all,
-        "deciles_all_ping_latency[]": wdeciles_all,
-        "mean_full_ping_latency": mean_full,
-        "deciles_full_ping_latency[]": deciles_full,
-        "stdev_full_ping_latency": statistics.pstdev(rtt_full) if rtt_full else None,
-    }, {
-        "load_bucket_samples[]": bucket_samples,
-        "load_bucket_min_latency[]": bucket_min,
-        "load_bucket_median_latency[]": bucket_median,
-        "load_bucket_max_latency[]": bucket_max,
-    }, {
-        "download_usage": int(round(usage_down / 8)),
-        "upload_usage": int(round(usage_up / 8)),
-    }
+            "count_unscheduled": count_unsched,
+            "total_unscheduled_ping_drop": total_unsched_drop,
+            "count_full_unscheduled_ping_drop": count_full_unsched,
+        },
+        {
+            "init_run_fragment": init_run_length,
+            "final_run_fragment": run_length,
+            "run_seconds[1,]": second_runs,
+            "run_minutes[1,]": minute_runs,
+        },
+        {
+            "mean_all_ping_latency": wmean_all,
+            "deciles_all_ping_latency[]": wdeciles_all,
+            "mean_full_ping_latency": mean_full,
+            "deciles_full_ping_latency[]": deciles_full,
+            "stdev_full_ping_latency": statistics.pstdev(rtt_full)
+            if rtt_full
+            else None,
+        },
+        {
+            "load_bucket_samples[]": bucket_samples,
+            "load_bucket_min_latency[]": bucket_min,
+            "load_bucket_median_latency[]": bucket_median,
+            "load_bucket_max_latency[]": bucket_max,
+        },
+        {
+            "download_usage": int(round(usage_down / 8)),
+            "upload_usage": int(round(usage_up / 8)),
+        },
+    )


 def get_obstruction_map(context: Optional[ChannelContext] = None):
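history_stats now returns an explicitly parenthesized 6-tuple of dict groups. A consuming sketch, assuming starlink_grpc is importable and a dish is reachable:

import starlink_grpc

general, ping_drop, run_lengths, latency, loaded_latency, usage = (
    starlink_grpc.history_stats(parse_samples=-1)
)
print(general["samples"], ping_drop["total_ping_drop"], usage["download_usage"])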
@@ -1342,13 +1551,18 @@ def get_obstruction_map(context: Optional[ChannelContext] = None):

     Raises:
         grpc.RpcError: Communication or service error.
+        AttributeError, ValueError: Protocol error. Either the target is not a
+            Starlink user terminal or the grpc protocol has changed in a way
+            this module cannot handle.
     """

     def grpc_call(channel: grpc.Channel):
         if imports_pending:
             resolve_imports(channel)
         stub = device_pb2_grpc.DeviceStub(channel)
-        response = stub.Handle(device_pb2.Request(dish_get_obstruction_map={}),
-                               timeout=REQUEST_TIMEOUT)
+        response = stub.Handle(
+            device_pb2.Request(dish_get_obstruction_map={}), timeout=REQUEST_TIMEOUT
+        )
         return response.dish_get_obstruction_map

     return call_with_channel(grpc_call, context=context)
@@ -1368,15 +1582,22 @@ def obstruction_map(context: Optional[ChannelContext] = None):
     representation the SNR data instead, see `get_obstruction_map`.

     Raises:
-        GrpcError: Failed getting status info from the Starlink user terminal.
+        GrpcError: Failed getting obstruction data from the Starlink user
+            terminal.
     """
     try:
         map_data = get_obstruction_map(context)
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         raise GrpcError(e) from e

-    cols = map_data.num_cols
-    return tuple((map_data.snr[i:i + cols]) for i in range(0, cols * map_data.num_rows, cols))
+    try:
+        cols = map_data.num_cols
+        return tuple(
+            (map_data.snr[i : i + cols])
+            for i in range(0, cols * map_data.num_rows, cols)
+        )
+    except (AttributeError, IndexError, TypeError) as e:
+        raise GrpcError(e) from e


 def reboot(context: Optional[ChannelContext] = None) -> None:
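obstruction_map slices the dish's flat SNR array into num_rows row tuples of num_cols values each. A self-contained demo of that slicing with fake data:

# Fake 2x3 obstruction map; the real values come from the dish via grpc.
snr = [0.1, 0.2, 0.3,
       0.4, 0.5, 0.6]
num_rows, num_cols = 2, 3

rows = tuple(
    snr[i : i + num_cols] for i in range(0, num_cols * num_rows, num_cols)
)
print(rows)  # ([0.1, 0.2, 0.3], [0.4, 0.5, 0.6])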
@@ -1391,6 +1612,7 @@ def reboot(context: Optional[ChannelContext] = None) -> None:
     Raises:
         GrpcError: Communication or service error.
     """
+
     def grpc_call(channel: grpc.Channel) -> None:
         if imports_pending:
             resolve_imports(channel)
@@ -1400,11 +1622,13 @@ def reboot(context: Optional[ChannelContext] = None) -> None:

     try:
         call_with_channel(grpc_call, context=context)
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         raise GrpcError(e) from e


-def set_stow_state(unstow: bool = False, context: Optional[ChannelContext] = None) -> None:
+def set_stow_state(
+    unstow: bool = False, context: Optional[ChannelContext] = None
+) -> None:
     """Request dish stow or unstow operation.

     Args:
@@ -1418,14 +1642,67 @@ def set_stow_state(unstow: bool = False, context: Optional[ChannelContext] = Non
     Raises:
         GrpcError: Communication or service error.
     """

     def grpc_call(channel: grpc.Channel) -> None:
         if imports_pending:
             resolve_imports(channel)
         stub = device_pb2_grpc.DeviceStub(channel)
-        stub.Handle(device_pb2.Request(dish_stow={"unstow": unstow}), timeout=REQUEST_TIMEOUT)
+        stub.Handle(
+            device_pb2.Request(dish_stow={"unstow": unstow}), timeout=REQUEST_TIMEOUT
+        )
         # response is empty message in this case, so just ignore it

     try:
         call_with_channel(grpc_call, context=context)
-    except grpc.RpcError as e:
+    except (AttributeError, ValueError, grpc.RpcError) as e:
+        raise GrpcError(e) from e
+
+
+def set_sleep_config(
+    start: int,
+    duration: int,
+    enable: bool = True,
+    context: Optional[ChannelContext] = None,
+) -> None:
+    """Set sleep mode configuration.
+
+    Args:
+        start (int): Time, in minutes past midnight UTC, to start sleep mode
+            each day. Ignored if enable is set to False.
+        duration (int): Duration of sleep mode, in minutes. Ignored if enable
+            is set to False.
+        enable (bool): Whether or not to enable sleep mode.
+        context (ChannelContext): Optionally provide a channel for reuse
+            across repeated calls. If an existing channel is reused, the RPC
+            call will be retried at most once, since connectivity may have
+            been lost and restored in the time since it was last used.
+
+    Raises:
+        GrpcError: Communication or service error, including invalid start or
+            duration.
+    """
+    if not enable:
+        start = 0
+        # duration of 0 not allowed, even when disabled
+        duration = 1
+
+    def grpc_call(channel: grpc.Channel) -> None:
+        if imports_pending:
+            resolve_imports(channel)
+        stub = device_pb2_grpc.DeviceStub(channel)
+        stub.Handle(
+            device_pb2.Request(
+                dish_power_save={
+                    "power_save_start_minutes": start,
+                    "power_save_duration_minutes": duration,
+                    "enable_power_save": enable,
+                }
+            ),
+            timeout=REQUEST_TIMEOUT,
+        )
+        # response is empty message in this case, so just ignore it
+
+    try:
+        call_with_channel(grpc_call, context=context)
+    except (AttributeError, ValueError, grpc.RpcError) as e:
         raise GrpcError(e) from e
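The new set_sleep_config call takes its schedule in minutes past midnight UTC. A usage sketch, assuming starlink_grpc is importable and a dish is reachable:

import starlink_grpc

try:
    # stow the dish, then sleep nightly from 01:00 UTC (60 minutes past
    # midnight) for 6 hours
    starlink_grpc.set_stow_state(unstow=False)
    starlink_grpc.set_sleep_config(start=60, duration=360, enable=True)
    # disabling later: start and duration are then ignored (and coerced)
    starlink_grpc.set_sleep_config(start=0, duration=1, enable=False)
except starlink_grpc.GrpcError as e:
    print("Dish control request failed:", str(e))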
starlink_json.py

@@ -9,13 +9,12 @@ response does.
 See the starlink_grpc module docstring for descriptions of the stat elements.
 """

+from itertools import chain
 import json
 import math
 import statistics
 import sys

-from itertools import chain
-

 class JsonError(Exception):
     """Provides error info when something went wrong with JSON parsing."""
@@ -66,10 +65,12 @@ def history_stats_field_names():
     additional data groups, so it not recommended for the caller to
     assume exactly 6 elements.
     """
-    return [
-        "samples",
-        "end_counter",
-    ], [
-        "total_ping_drop",
-        "count_full_ping_drop",
-        "count_obstructed",
+    return (
+        [
+            "samples",
+            "end_counter",
+        ],
+        [
+            "total_ping_drop",
+            "count_full_ping_drop",
+            "count_obstructed",
@@ -78,26 +79,31 @@ def history_stats_field_names():
-        "count_unscheduled",
-        "total_unscheduled_ping_drop",
-        "count_full_unscheduled_ping_drop",
-    ], [
-        "init_run_fragment",
-        "final_run_fragment",
-        "run_seconds[1,61]",
-        "run_minutes[1,61]",
-    ], [
-        "mean_all_ping_latency",
-        "deciles_all_ping_latency[11]",
-        "mean_full_ping_latency",
-        "deciles_full_ping_latency[11]",
-        "stdev_full_ping_latency",
-    ], [
-        "load_bucket_samples[15]",
-        "load_bucket_min_latency[15]",
-        "load_bucket_median_latency[15]",
-        "load_bucket_max_latency[15]",
-    ], [
-        "download_usage",
-        "upload_usage",
-    ]
+            "count_unscheduled",
+            "total_unscheduled_ping_drop",
+            "count_full_unscheduled_ping_drop",
+        ],
+        [
+            "init_run_fragment",
+            "final_run_fragment",
+            "run_seconds[1,61]",
+            "run_minutes[1,61]",
+        ],
+        [
+            "mean_all_ping_latency",
+            "deciles_all_ping_latency[11]",
+            "mean_full_ping_latency",
+            "deciles_full_ping_latency[11]",
+            "stdev_full_ping_latency",
+        ],
+        [
+            "load_bucket_samples[15]",
+            "load_bucket_min_latency[15]",
+            "load_bucket_median_latency[15]",
+            "load_bucket_max_latency[15]",
+        ],
+        [
+            "download_usage",
+            "upload_usage",
+        ],
+    )


 def get_history(filename):
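The name groups returned by history_stats_field_names parallel the data groups returned by history_stats, so callers can pair them up. The sketch below uses abbreviated fake data; the real names also carry bracketed size annotations such as run_seconds[1,61]:

from itertools import chain

# abbreviated stand-ins for history_stats_field_names() and history_stats()
name_groups = (["samples", "end_counter"], ["total_ping_drop"])
data_groups = ({"samples": 900, "end_counter": 12345}, {"total_ping_drop": 2.5})

print(list(chain.from_iterable(name_groups)))  # flattened header row
for names, group in zip(name_groups, data_groups):
    for name in names:
        print(name, "=", group[name])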
@@ -184,9 +190,9 @@ def history_bulk_data(filename, parse_samples, verbose=False):
     except Exception as e:
         raise JsonError(e)

-    sample_range, parsed_samples, current = _compute_sample_range(history,
-                                                                  parse_samples,
-                                                                  verbose=verbose)
+    sample_range, parsed_samples, current = _compute_sample_range(
+        history, parse_samples, verbose=verbose
+    )

     pop_ping_drop_rate = []
     pop_ping_latency_ms = []
@@ -196,7 +202,10 @@ def history_bulk_data(filename, parse_samples, verbose=False):
     for i in sample_range:
         pop_ping_drop_rate.append(history["popPingDropRate"][i])
         pop_ping_latency_ms.append(
-            history["popPingLatencyMs"][i] if history["popPingDropRate"][i] < 1 else None)
+            history["popPingLatencyMs"][i]
+            if history["popPingDropRate"][i] < 1
+            else None
+        )
         downlink_throughput_bps.append(history["downlinkThroughputBps"][i])
         uplink_throughput_bps.append(history["uplinkThroughputBps"][i])

@@ -250,9 +259,9 @@ def history_stats(filename, parse_samples, verbose=False):
     except Exception as e:
         raise JsonError(e)

-    sample_range, parsed_samples, current = _compute_sample_range(history,
-                                                                  parse_samples,
-                                                                  verbose=verbose)
+    sample_range, parsed_samples, current = _compute_sample_range(
+        history, parse_samples, verbose=verbose
+    )

     tot = 0.0
     count_full_drop = 0
@@ -289,7 +298,7 @@ def history_stats(filename, parse_samples, verbose=False):
                 if run_length <= 60:
                     second_runs[run_length - 1] += run_length
                 else:
-                    minute_runs[min((run_length-1) // 60 - 1, 59)] += run_length
+                    minute_runs[min((run_length - 1) // 60 - 1, 59)] += run_length
                 run_length = 0
             elif init_run_length is None:
                 init_run_length = 0
@@ -305,7 +314,7 @@ def history_stats(filename, parse_samples, verbose=False):
         if d == 0.0:
             rtt_full.append(rtt)
             if down + up > 500000:
-                rtt_buckets[min(14, int(math.log2((down+up) / 500000)))].append(rtt)
+                rtt_buckets[min(14, int(math.log2((down + up) / 500000)))].append(rtt)
             else:
                 rtt_buckets[0].append(rtt)
         if d < 1.0:
@@ -321,7 +330,7 @@ def history_stats(filename, parse_samples, verbose=False):

     def weighted_mean_and_quantiles(data, n):
         if not data:
-            return None, [None] * (n+1)
+            return None, [None] * (n + 1)
         total_weight = sum(x[1] for x in data)
         result = []
         items = iter(data)
@@ -360,12 +369,16 @@ def history_stats(filename, parse_samples, verbose=False):
     rtt_all.sort(key=lambda x: x[0])
     wmean_all, wdeciles_all = weighted_mean_and_quantiles(rtt_all, 10)
     rtt_full.sort()
-    mean_full, deciles_full = weighted_mean_and_quantiles(tuple((x, 1.0) for x in rtt_full), 10)
+    mean_full, deciles_full = weighted_mean_and_quantiles(
+        tuple((x, 1.0) for x in rtt_full), 10
+    )

-    return {
-        "samples": parsed_samples,
-        "end_counter": current,
-    }, {
-        "total_ping_drop": tot,
-        "count_full_ping_drop": count_full_drop,
-        "count_obstructed": count_obstruct,
+    return (
+        {
+            "samples": parsed_samples,
+            "end_counter": current,
+        },
+        {
+            "total_ping_drop": tot,
+            "count_full_ping_drop": count_full_drop,
+            "count_obstructed": count_obstruct,
@@ -374,23 +387,30 @@ def history_stats(filename, parse_samples, verbose=False):
-        "count_unscheduled": count_unsched,
-        "total_unscheduled_ping_drop": total_unsched_drop,
-        "count_full_unscheduled_ping_drop": count_full_unsched,
-    }, {
-        "init_run_fragment": init_run_length,
-        "final_run_fragment": run_length,
-        "run_seconds[1,]": second_runs,
-        "run_minutes[1,]": minute_runs,
-    }, {
-        "mean_all_ping_latency": wmean_all,
-        "deciles_all_ping_latency[]": wdeciles_all,
-        "mean_full_ping_latency": mean_full,
-        "deciles_full_ping_latency[]": deciles_full,
-        "stdev_full_ping_latency": statistics.pstdev(rtt_full) if rtt_full else None,
-    }, {
-        "load_bucket_samples[]": bucket_samples,
-        "load_bucket_min_latency[]": bucket_min,
-        "load_bucket_median_latency[]": bucket_median,
-        "load_bucket_max_latency[]": bucket_max,
-    }, {
-        "download_usage": int(round(usage_down / 8)),
-        "upload_usage": int(round(usage_up / 8)),
-    }
+            "count_unscheduled": count_unsched,
+            "total_unscheduled_ping_drop": total_unsched_drop,
+            "count_full_unscheduled_ping_drop": count_full_unsched,
+        },
+        {
+            "init_run_fragment": init_run_length,
+            "final_run_fragment": run_length,
+            "run_seconds[1,]": second_runs,
+            "run_minutes[1,]": minute_runs,
+        },
+        {
+            "mean_all_ping_latency": wmean_all,
+            "deciles_all_ping_latency[]": wdeciles_all,
+            "mean_full_ping_latency": mean_full,
+            "deciles_full_ping_latency[]": deciles_full,
+            "stdev_full_ping_latency": statistics.pstdev(rtt_full)
+            if rtt_full
+            else None,
+        },
+        {
+            "load_bucket_samples[]": bucket_samples,
+            "load_bucket_min_latency[]": bucket_min,
+            "load_bucket_median_latency[]": bucket_median,
+            "load_bucket_max_latency[]": bucket_max,
+        },
+        {
+            "download_usage": int(round(usage_down / 8)),
+            "upload_usage": int(round(usage_up / 8)),
+        },
+    )