first version

Alfredo Oliviero 2024-07-11 14:15:16 +02:00
commit ad728c1f8f
5 changed files with 378 additions and 0 deletions

.gitignore (vendored, new file, 247 additions)

@@ -0,0 +1,247 @@
logs
results
.DS_Store

# Created by https://www.toptal.com/developers/gitignore/api/python,linux,macos,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=python,linux,macos,visualstudiocode

### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### macOS Patch ###
# iCloud generated files
*.icloud

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

# End of https://www.toptal.com/developers/gitignore/api/python,linux,macos,visualstudiocode

README.md (new file, empty)

analyze_startup.py (new file, 116 additions)

@@ -0,0 +1,116 @@
import pandas as pd
import matplotlib.pyplot as plt
import re
import os
from datetime import datetime
import json

# Optional: directory to scan for log files; when set, it overrides files_and_labels below
logs_dir = None  # Example: "./logs"

# Explicit list of (file_path, label) pairs
files_and_labels = [
    # ("./logs/startup_lr62-pre-02.log", "lr62-pre-02"),
    # ("./logs/startup_lr62-pre-01.log", "lr62-pre-01"),
    # ("./logs/startup_lr62-dev.log", "lr62-dev"),
    ("./logs/startup_lr62-prod-02.log", "lr62-prod-02"),
    ("./logs/startup_lr62-prod-01.log", "lr62-prod-01"),
]

# Load (date, startup time) pairs from a log file into a DataFrame
def load_data(file_path):
    with open(file_path, 'r') as file:
        lines = file.readlines()
    data = []
    for line in lines:
        date_match = re.search(r'(\d{4}-\d{2}-\d{2})', line)
        time_match = re.search(r'Server startup in (\d+) ms', line)
        if not date_match or not time_match:
            continue  # skip lines that do not carry both a date and a startup time
        data.append([date_match.group(0), int(time_match.group(1))])
    df = pd.DataFrame(data, columns=["Date", "StartupTime"])
    df["Date"] = pd.to_datetime(df["Date"])
    df["StartupTimeSeconds"] = df["StartupTime"] / 1000  # convert to seconds
    return df

# Discover log files in logs_dir and infer labels from their file names
def find_files_and_labels(logs_dir):
    files_and_labels = []
    for file_name in os.listdir(logs_dir):
        if file_name.endswith(".log"):
            file_path = os.path.join(logs_dir, file_name)
            label = re.search(r'startup_(.+)\.log', file_name).group(1)  # infer label from file name
            files_and_labels.append((file_path, label))
    return files_and_labels

# If logs_dir is defined, populate files_and_labels from the directory
if logs_dir:
    files_and_labels = find_files_and_labels(logs_dir)
    # Print the discovered files and labels in a copy-pasteable format
    print("files_and_labels = [")
    for file_path, label in files_and_labels:
        print(f' ("{file_path}", "{label}"),')
    print("]")

min_date_str = "2023-07-01"  # Default start date
max_date_str = None  # Default end date (None = today)

# Convert date strings to datetime objects
min_date = pd.to_datetime(min_date_str) if min_date_str else None
max_date = pd.to_datetime(max_date_str) if max_date_str else None

# Use the current date if max_date_str is None
current_date_str = datetime.now().strftime("%Y-%m-%d")
date_range_str = f"{min_date_str}_to_{max_date_str if max_date_str else current_date_str}"

# Ensure the results directory exists
results_dir = "./results"
os.makedirs(results_dir, exist_ok=True)

# Plot setup
plt.figure(figsize=(14, 7))

# Dictionary to hold data for JSON export
export_data = {}

# Plot one series per (file, label) pair
for file_path, label in files_and_labels:
    # Load data from file
    df = load_data(file_path)
    # Filter data based on the date range
    if min_date is not None:
        df = df[df["Date"] >= min_date]
    if max_date is not None:
        df = df[df["Date"] <= max_date]
    # Sort data by date
    df_sorted = df.sort_values(by="Date")
    plt.plot(df_sorted["Date"], df_sorted["StartupTimeSeconds"], marker='o', label=label)
    # Add data to the dictionary for JSON export
    export_data[label] = df_sorted[["Date", "StartupTimeSeconds"]].to_dict(orient="records")

# Build the output filename base from the labels and the date range
labels_str = "_".join([label for _, label in files_and_labels])
filename_base = f"{results_dir}/server_startup_{labels_str}_{date_range_str}"
png_filename = f"{filename_base}.png"
json_filename = f"{filename_base}.json"

plt.title("Server Startup Time Over Time (Filtered, in seconds)")
plt.xlabel("Date")
plt.ylabel("Startup Time (seconds)")
plt.grid(True)
plt.legend()
plt.xticks(rotation=45)
plt.tight_layout()

# Save the plot as a PNG file
plt.savefig(png_filename)

# Save the filtered data as a JSON file
with open(json_filename, 'w') as json_file:
    json.dump(export_data, json_file, indent=4, default=str)

plt.show()
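
For reference, a minimal sketch of the log-line format the two regexes in load_data expect: a YYYY-MM-DD date plus a "Server startup in N ms" marker, as produced by the grep in get_startup_logs.sh. The sample line below is invented for illustration; the real lines carry whatever the servers' Tomcat logs contain.

import re

# Hypothetical grep output line (file path and timestamp invented for illustration)
sample = "/home/life/Portal-Bundle/tomcat-7.0.62/logs/catalina.out:2024-07-10 08:15:02,123 INFO ... Server startup in 95123 ms"
print(re.search(r'(\d{4}-\d{2}-\d{2})', sample).group(0))         # 2024-07-10
print(re.search(r'Server startup in (\d+) ms', sample).group(1))  # 95123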

get_startup_logs.sh (new executable file, 13 additions)

@@ -0,0 +1,13 @@
#!/bin/bash

DEST_FOLDER=logs

# Create the logs directory if it does not already exist
mkdir -p $DEST_FOLDER

# Define an array of servers
SERVERS=( "lr62-pre-01" "lr62-pre-02" "lr62-dev" "lr62-prod-01" "lr62-prod-02" )

# For each server, grep the Tomcat logs for startup lines and save the result locally
for SERVER in "${SERVERS[@]}"; do
    ssh "$SERVER" 'grep -R "Server startup in" /home/life/Portal-Bundle/tomcat-7.0.62/logs/*' > "$DEST_FOLDER/startup_$SERVER.log"
done
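
Assuming SSH host aliases are configured for the five machines in the SERVERS array, a run is expected to leave one grep dump per server under logs/:

./get_startup_logs.sh
# Expected result:
#   logs/startup_lr62-pre-01.log
#   logs/startup_lr62-pre-02.log
#   logs/startup_lr62-dev.log
#   logs/startup_lr62-prod-01.log
#   logs/startup_lr62-prod-02.log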

requirements.txt (new file, 2 additions)

@@ -0,0 +1,2 @@
pandas
matplotlib
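
To reproduce the analysis locally, a standard setup would be the following (a virtualenv is optional; collecting logs requires SSH access to the servers):

pip install -r requirements.txt
./get_startup_logs.sh
python analyze_startup.py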