Default behavior: display stats for the last 30 days only

This commit is contained in:
mathieu 2022-02-27 17:18:09 +01:00
parent c4114c1c5e
commit 28c04cad8b
2 changed files with 22 additions and 7 deletions

View File

@ -5,6 +5,7 @@ import logging
import os import os
import sqlite3 import sqlite3
from datetime import datetime from datetime import datetime
from datetime import timedelta
from pathlib import Path from pathlib import Path
import pandas as pd import pandas as pd
@ -36,16 +37,23 @@ def parse_args():
default=HERE / "stats.html", default=HERE / "stats.html",
) )
parser.add_argument("--debug", "-d", action="store_true", help="Run in debug mode") parser.add_argument("--debug", "-d", action="store_true", help="Run in debug mode")
parser.add_argument("-n", "--n-days-histo", dest="n_days_histo",
type=int,
default=30,
help="Number of day to show")
args = parser.parse_args() args = parser.parse_args()
return args.sqlite, args.html, args.debug return args.sqlite, args.html, args.debug, args.n_days_histo
def generate_html_report(sqlite_fn: Path, html_fn: Path): def generate_html_report(sqlite_fn: Path, html_fn: Path, n_days_histo):
# Create your connection. # Create your connection.
log.info(f"Read sqlite {sqlite_fn}") log.info(f"Read sqlite {sqlite_fn}")
cnx = sqlite3.connect(sqlite_fn) cnx = sqlite3.connect(sqlite_fn)
df = pd.read_sql_query(f"SELECT * FROM {TABLE_NAME}", cnx) first_date = datetime.today() - timedelta(days = n_days_histo)
log.debug(f"First date : {first_date}")
df = pd.read_sql_query(f"SELECT * FROM {TABLE_NAME} WHERE date > '{first_date}'", cnx)
df = df.set_index("date") df = df.set_index("date")
log.info(f"Find {len(df)} items") log.info(f"Find {len(df)} items")
@ -58,6 +66,12 @@ def generate_html_report(sqlite_fn: Path, html_fn: Path):
# dfi["granu"] = i # dfi["granu"] = i
dfs = dfs.append(dfi) dfs = dfs.append(dfi)
#breakpoint()
# Remove empty conlumn
dfs = dfs.dropna(axis=1, how='all')
#breakpoint()
log.info(f"Get {len(dfs)} items after sub-sampling") log.info(f"Get {len(dfs)} items after sub-sampling")
fig = px.area(dfs) fig = px.area(dfs)
fig.for_each_trace(lambda trace: trace.update(fillcolor=trace.line.color)) fig.for_each_trace(lambda trace: trace.update(fillcolor=trace.line.color))
@ -68,19 +82,20 @@ def generate_html_report(sqlite_fn: Path, html_fn: Path):
title="Utilisation de RAM pour les docker-compose de l'eunuque (pour voir une stat, mettre la sourie sur " title="Utilisation de RAM pour les docker-compose de l'eunuque (pour voir une stat, mettre la sourie sur "
f"le haut des courbes, vers les points), généré le {now.strftime('%d/%m/%Y %H:%M:%S')}", f"le haut des courbes, vers les points), généré le {now.strftime('%d/%m/%Y %H:%M:%S')}",
xaxis_title="Temps", xaxis_title="Temps",
yaxis_title="RAM", yaxis_title="RAM (MiB)",
legend={"traceorder":"reversed"}
) )
fig.write_html(html_fn, include_plotlyjs="cdn") fig.write_html(html_fn, include_plotlyjs="cdn")
log.info(f"Report generate in {html_fn}") log.info(f"Report generate in {html_fn}")
def main(): def main():
sqlite_fn, html_fn, debug = parse_args() sqlite_fn, html_fn, debug, n_days_histo = parse_args()
if debug: if debug:
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
else: else:
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
generate_html_report(sqlite_fn, html_fn) generate_html_report(sqlite_fn, html_fn, n_days_histo)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -7,7 +7,7 @@ SCRIPT_PATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
pushd "${SCRIPT_PATH}" > /dev/null pushd "${SCRIPT_PATH}" > /dev/null
venv/bin/python generate_html.py stats.sqlite /home/caracals/services/org-caracals-files/files/stats.html venv/bin/python generate_html.py stats.sqlite /home/caracals/services/org.caracals.files/files/stats.html
# Back to original path # Back to original path