Mr Propre

This commit is contained in:
Alban Bronisz 2021-11-21 15:53:34 +01:00
parent e22dc4e09e
commit 3458d9d2a7
7 changed files with 13 additions and 189 deletions

.gitignore
@@ -1,5 +1,6 @@
stats.json
venv
mem_usage.png
__pycache__/
venv/
*.sqlite
*.json
*.html

@@ -1,92 +0,0 @@
import argparse
import json
from datetime import datetime

import pygal

PNG_OUTPUT = 'mem_usage.png'


def parse_args():
    parser = argparse.ArgumentParser(
        description=("Render for docker stats memory usage")
    )
    parser.add_argument('stats', type=str, help="Path to stats file")
    parser.add_argument('--web', '-w', action='store_true',
                        help="Render in web browser instead of svg")
    args = parser.parse_args()
    return args.stats, args.web


def extract_names(data: list):
    names = set()
    for d in data:
        for field in d:
            name = name_from_field(field)
            names.update([name])
    return names


def name_from_field(field: str) -> str:
    if field.startswith("onlyoffice-"):
        return "onlyoffice"
    name = field.split("_")[0]
    name = name.replace("org-caracals-", "")
    name = name.replace(".caracals.org", "")
    return name
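
# Illustration (hypothetical container name): name_from_field("org-caracals-nextcloud_app_1")
# returns "nextcloud"; any field starting with "onlyoffice-" is grouped as "onlyoffice".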


def main():
    stats_fn, web_render = parse_args()
    data = load_data(stats_fn)
    render(data, web_render)


def load_data(stats_fn: str):
    with open(stats_fn) as stats_f:
        data = json.load(stats_f)
    data_dict = {name_from_field(field): [0] * len(data) for field in extract_names(data)}
    print("Found", len(data), "points")
    print(" keys:", data_dict.keys())
    for t_i, stat in enumerate(data):
        for field in stat:
            if field == "date":  # date
                # strptime's %z does not accept "+01:00"-style offsets on older Pythons,
                # so drop the colon while keeping the offset value
                date_ = stat[field].replace("+01:00", "+0100").replace("+02:00", "+0200")
                data_dict[field][t_i] = datetime.strptime(date_, "%Y-%m-%dT%H:%M:%S%z")
            else:  # float
                value = stat[field].split(" ")[0]
                value = value.replace("MiB", "e3")
                value = value.replace("GiB", "e6")
                value = value.replace("B", "")
                data_dict[name_from_field(field)][t_i] += float(value) / 1000  # values are in MiB
    return data_dict
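
# Note: load_data() returns a dict shaped like
#   {"date": [datetime, ...], "<service>": [memory in MiB, ...], ...}
# with one list entry per sample; "<service>" stands for whatever container
# names appear in the stats file.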


def render(data: dict, web_render: bool = False):
    style = pygal.style.Style(value_label_font_size=5, value_font_size=5, label_font_size=5, legend_font_size=5)
    bar_chart = pygal.StackedBar(height=400, x_label_rotation=25, style=style, legend_box_size=5)
    labels = [d.strftime("%B %d %H:%M") for d in data["date"]]
    bar_chart.x_labels = labels
    for k in data:
        if k == "date":
            continue
        # if "db" in k or "database" in k or "mysql" in k or "mongo" in k or "postgre" in k:
        #     continue
        bar_chart.add(k, data[k])
    if web_render:
        bar_chart.render_in_browser()
    else:
        bar_chart.render_to_png(PNG_OUTPUT)
        print("Image generated in:", PNG_OUTPUT)


if __name__ == "__main__":
    main()

@@ -1,51 +0,0 @@
#!/usr/bin/env bash
set -eu
# Need to replace the last line of the file from "}," to "}]"
# To avoid modifying the original file, use a temporary file
TMP_FILE=stats_tmp.json
VENV=venv
if [[ $# -ne 1 ]] && [[ $# -ne 2 ]]; then
echo "Usage:"
echo " $0 STATS_FILE [-w] "
echo ""
echo "Params:"
echo " - stats_file: path to stats file"
echo ""
echo "Options:"
echo " - w: do render using web browser"
exit 1
fi
STATS_FILE=$1
OPT=""
if [[ $# -eq 2 ]]; then
OPT=${OPT}" -w"
fi
# Pip install -----------------------------------------------------------------
if [ ! -d ${VENV} ]; then
echo "Installing python environment in ${VENV}..."
python3 -m venv ${VENV}
. ${VENV}/bin/activate
pip install --upgrade pip
pip install -r requirements.txt
echo "Python environment installed..."
echo ""
else
. ${VENV}/bin/activate
fi
# Render ----------------------------------------------------------------------
# Update last line
sed -e '$s/},/}]/' ${STATS_FILE} > ${TMP_FILE}
# Generate render
python render.py ${TMP_FILE} ${OPT}
rm ${TMP_FILE}
deactivate

@@ -1,10 +0,0 @@
plotly
pygal
# For pygal web render
lxml
tinycss
cssselect
cairosvg

@@ -31,7 +31,7 @@ def parse_args():
"html",
nargs="?",
type=Path,
help="Path to sqlite file",
help="Path to html output report",
default=HERE / "stats.html",
)
parser.add_argument("--debug", "-d", action="store_true", help="Run in debug mode")
@@ -41,6 +41,7 @@ def parse_args():
def generate_html_report(sqlite_fn: Path, html_fn: Path):
# Create your connection.
log.info(f"Read sqlite {sqlite_fn}")
cnx = sqlite3.connect(sqlite_fn)
df = pd.read_sql_query(f"SELECT * FROM {TABLE_NAME}", cnx)
@@ -56,7 +57,7 @@ def generate_html_report(sqlite_fn: Path, html_fn: Path):
# dfi["granu"] = i
dfs = dfs.append(dfi)
log.info(f"Find {len(dfs)} items")
log.info(f"Get {len(dfs)} items after sub-sampling")
fig = px.area(dfs)
fig.for_each_trace(lambda trace: trace.update(fillcolor=trace.line.color))
fig["layout"].pop("updatemenus") # optional, drop animation buttons
@@ -69,6 +70,7 @@ def generate_html_report(sqlite_fn: Path, html_fn: Path):
yaxis_title="RAM",
)
fig.write_html(html_fn, include_plotlyjs="cdn")
log.info(f"Report generate in {html_fn}")
def main():

@@ -1,11 +1,11 @@
#!/usr/bin/env python3
import argparse
from datetime import datetime
import json
import logging
import os
import sqlite3
from datetime import datetime
from pathlib import Path
from subprocess import check_output
from typing import Dict
@@ -81,7 +81,9 @@ def format_stats(out):
json_stats = b"{" + out.replace(b"\n", b", ")[:-2] + b"}"
stats = json.loads(json_stats)
for k in stats:
stats[k] = str(stats[k]).split(" ")[0] # keep first memory (ex: "658.4MiB / 7.724GiB" > "658.4MiB")
stats[k] = str(stats[k]).split(" ")[
0
] # keep first memory (ex: "658.4MiB / 7.724GiB" > "658.4MiB")
stats[k] = stats[k].replace("MiB", "e3")
stats[k] = stats[k].replace("GiB", "e6")
stats[k] = stats[k].replace("B", "")

@@ -1,28 +0,0 @@
#!/usr/bin/env bash
set -eu
SCRIPT_PATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
OUTPUT="stats.json"
# Move in repo folder
pushd ${SCRIPT_PATH} > /dev/null
# If file does not exist, create it with JSON bracket
if [ ! -f $OUTPUT ]; then
echo "[" >> ${OUTPUT}
fi
# Append stat data
now=$(date -Iseconds)
echo "{" >> ${OUTPUT}
echo " \"data\": \"${now}\"" >> ${OUTPUT}
/snap/bin/docker stats --no-stream --format " ,\"{{.Name}}\": \"{{.MemUsage}}\"" >> ${OUTPUT}
echo "}," >> ${OUTPUT}
# Back to original path
popd
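
# For illustration, each run appends a block like the following (container name
# and values are made up; the timestamp comes from `date -Iseconds`):
#   {
#    "date": "2021-11-21T15:53:34+01:00"
#    ,"nextcloud_app_1": "658.4MiB / 7.724GiB"
#   },
# The file therefore keeps ending with "},", and only becomes a valid JSON array
# once that last line is rewritten to "}]" (which is what the render script's
# sed call does before plotting).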