docker-stats-histo/generate_html.py

103 lines
2.9 KiB
Python
Raw Normal View History

2021-11-21 14:13:54 +00:00
#!/usr/bin/env python3
import argparse
import logging
import os
2021-04-07 20:38:44 +00:00
import sqlite3
2021-11-25 21:41:53 +00:00
from datetime import datetime
from datetime import timedelta
2021-11-21 14:13:54 +00:00
from pathlib import Path
2021-04-07 20:38:44 +00:00
import pandas as pd
import plotly.express as px
2021-11-21 14:13:54 +00:00
# Module-level logger named after this module (configured in main()).
log = logging.getLogger(__name__)
# Directory containing this script; used as the default location for
# both the sqlite database and the generated HTML report.
HERE = Path(os.path.dirname(os.path.realpath(__file__)))
# Name of the sqlite table holding the docker stats rows.
TABLE_NAME = "dc_stats"
INTERVALS = [1] # on cron interval is 10min
def parse_args():
    """Parse the command line.

    Returns a 4-tuple: (sqlite path, html output path, debug flag,
    number of days of history to plot).
    """
    arg_parser = argparse.ArgumentParser(description="Extract docker stats in a sqlite db")
    arg_parser.add_argument(
        "sqlite",
        nargs="?",
        type=Path,
        default=HERE / "stats.sqlite",
        help="Path to sqlite file",
    )
    arg_parser.add_argument(
        "html",
        nargs="?",
        type=Path,
        default=HERE / "stats.html",
        help="Path to html output report",
    )
    arg_parser.add_argument("--debug", "-d", action="store_true", help="Run in debug mode")
    arg_parser.add_argument(
        "-n",
        "--n-days-histo",
        dest="n_days_histo",
        type=int,
        default=30,
        help="Number of day to show",
    )
    opts = arg_parser.parse_args()
    return opts.sqlite, opts.html, opts.debug, opts.n_days_histo
2021-11-21 14:13:54 +00:00
def generate_html_report(sqlite_fn: Path, html_fn: Path, n_days_histo):
    """Build an interactive HTML report of docker RAM usage from the stats db.

    Reads the last `n_days_histo` days of rows from the TABLE_NAME table in
    the sqlite file `sqlite_fn`, sub-samples them per INTERVALS, and writes
    a plotly stacked-area chart to `html_fn`.

    :param sqlite_fn: path to the sqlite database produced by the collector
    :param html_fn: path where the HTML report is written
    :param n_days_histo: number of days of history to include in the plot
    """
    log.info(f"Read sqlite {sqlite_fn}")
    first_date = datetime.today() - timedelta(days=n_days_histo)
    log.debug(f"First date : {first_date}")
    cnx = sqlite3.connect(sqlite_fn)
    try:
        # Parameterized query: the date value is bound with "?" instead of
        # being interpolated into the SQL string (safer and quoting-proof).
        # str(datetime) matches the textual format previously embedded.
        df = pd.read_sql_query(
            f"SELECT * FROM {TABLE_NAME} WHERE date > ?",
            cnx,
            params=(str(first_date),),
        )
    finally:
        # Always release the sqlite connection, even if the read fails.
        cnx.close()
    df = df.set_index("date")
    log.info(f"Find {len(df)} items")

    # Sub-sample the data: keep every i-th row for each configured interval.
    # DataFrame.append was removed in pandas 2.0; collect the pieces and
    # concatenate them once.
    frames = [df[::i].copy() for i in INTERVALS]
    dfs = pd.concat(frames) if frames else pd.DataFrame()

    # Remove columns that are entirely empty (containers with no data).
    dfs = dfs.dropna(axis=1, how='all')
    log.info(f"Get {len(dfs)} items after sub-sampling")

    # Stacked-area chart; fill each trace with its own line color.
    fig = px.area(dfs)
    fig.for_each_trace(lambda trace: trace.update(fillcolor=trace.line.color))
    fig["layout"].pop("updatemenus") # optional, drop animation buttons
    fig.update_layout(transition={"duration": 1e12})
    now = datetime.now()
    fig.update_layout(
        title="Utilisation de RAM pour les docker-compose de l'eunuque (pour voir une stat, mettre la sourie sur "
        f"le haut des courbes, vers les points), généré le {now.strftime('%d/%m/%Y %H:%M:%S')}",
        xaxis_title="Temps",
        yaxis_title="RAM (MiB)",
        legend={"traceorder":"reversed"}
    )
    # include_plotlyjs="cdn" keeps the file small by loading plotly.js remotely.
    fig.write_html(html_fn, include_plotlyjs="cdn")
    log.info(f"Report generate in {html_fn}")
2021-04-07 20:38:44 +00:00
2021-11-21 14:13:54 +00:00
def main():
    """Entry point: parse arguments, set up logging, generate the report."""
    sqlite_fn, html_fn, debug, n_days_histo = parse_args()
    # Debug flag only controls log verbosity.
    level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(level=level)
    generate_html_report(sqlite_fn, html_fn, n_days_histo)


if __name__ == "__main__":
    main()