Performance Test Parse Metrics (3) (#200)
louiseschmidtgen authored Nov 14, 2024
1 parent 81298dd commit 5726162
Showing 9 changed files with 200 additions and 56 deletions.
100 changes: 69 additions & 31 deletions .github/workflows/performance.yaml
@@ -6,6 +6,7 @@ on:
pull_request:

permissions:
pull-requests: write
contents: read

concurrency:
@@ -39,6 +40,10 @@ jobs:
uses: actions/setup-go@v5
with:
go-version: "1.22"
- name: Set up R
run: |
sudo apt install r-base
sudo Rscript -e 'install.packages(c("ggplot2", "dplyr", "optparse"), repos="https://cloud.r-project.org")'
- name: Install lxd
run: |
sudo snap refresh lxd --channel 5.21/stable
@@ -57,21 +62,13 @@
- name: Unpack Snap
run: |
sudo unsquashfs -d snap-unpack-dir k8s.snap
- name: Create snap with k8s-dqlite ${{ github.head_ref }}
- name: Create snap with k8s-dqlite ${{ github.head_ref }} code
run: |
make static
sudo cp ./bin/static/k8s-dqlite snap-unpack-dir/bin/k8s-dqlite
sudo chmod o+r snap-unpack-dir/bin/k8s-dqlite
sudo mksquashfs snap-unpack-dir head.snap -noappend -comp lzo -no-fragments
- name: Run Performance test ${{ github.head_ref }} snap
env:
TEST_SNAP: ${{ github.workspace }}/head.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
run: |
cd test/performance && sg lxd -c 'tox -e performance'
- name: Create snap with k8s-dqlite base code
- name: Create snap with k8s-dqlite base-code
run: |
set -o pipefail
git fetch origin $BASE_BRANCH
@@ -80,16 +77,6 @@
sudo cp ./bin/static/k8s-dqlite snap-unpack-dir/bin/k8s-dqlite
sudo chmod o+r snap-unpack-dir/bin/k8s-dqlite
sudo mksquashfs snap-unpack-dir base-code.snap -noappend -comp lzo -no-fragments
- name: Switch back to target branch
run: git reset --hard $TARGET_SHA
- name: Run Performance test for base code snap
env:
TEST_SNAP: ${{ github.workspace }}/base-code.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
run: |
cd test/performance && sg lxd -c 'tox -e performance'
- name: Create snap with k8s-dqlite v1.1.11
run: |
set -o pipefail
@@ -99,16 +86,6 @@
sudo cp ./bin/static/k8s-dqlite snap-unpack-dir/bin/k8s-dqlite
sudo chmod o+r snap-unpack-dir/bin/k8s-dqlite
sudo mksquashfs snap-unpack-dir v1-1-11.snap -noappend -comp lzo -no-fragments
- name: Switch back to target branch
run: git reset --hard $TARGET_SHA
- name: Run Performance test for v1.1.11 snap
env:
TEST_SNAP: ${{ github.workspace }}/v1-1-11.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
run: |
cd test/performance && sg lxd -c 'tox -e performance'
- name: Create snap with k8s-dqlite v1.2.0
run: |
set -o pipefail
@@ -120,14 +97,75 @@
sudo mksquashfs snap-unpack-dir v1-2-0.snap -noappend -comp lzo -no-fragments
- name: Switch back to target branch
run: git reset --hard $TARGET_SHA
- name: Run Performance test ${{ github.head_ref }} snap
env:
TEST_SNAP: ${{ github.workspace }}/head.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
TEST_METRICS_DIR: ${{ github.workspace }}/test/performance/results/head
TEST_RUN_NAME: head
run: |
cd test/performance
mkdir -p ./results/head
sg lxd -c 'tox -e performance'
- name: Run Performance test for base code snap
env:
TEST_SNAP: ${{ github.workspace }}/base-code.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
TEST_METRICS_DIR: ${{ github.workspace }}/test/performance/results/base-code
TEST_RUN_NAME: base-code
run: |
cd test/performance
mkdir -p ./results/base-code
sg lxd -c 'tox -e performance'
- name: Run Performance test for v1.1.11 snap
env:
TEST_SNAP: ${{ github.workspace }}/v1-1-11.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
TEST_METRICS_DIR: ${{ github.workspace }}/test/performance/results/v1-1-11
TEST_RUN_NAME: v1-1-11
run: |
cd test/performance
mkdir -p ./results/v1-1-11
sg lxd -c 'tox -e performance'
- name: Run Performance test for v1.2.0 snap
env:
TEST_SNAP: ${{ github.workspace }}/v1-2-0.snap
TEST_SUBSTRATE: lxd
TEST_LXD_IMAGE: ubuntu:22.04
TEST_INSPECTION_REPORTS_DIR: ${{ github.workspace }}/inspection-reports
TEST_METRICS_DIR: ${{ github.workspace }}/test/performance/results/v1-2-0
TEST_RUN_NAME: v1-2-0
run: |
cd test/performance
mkdir -p ./results/v1-2-0
sg lxd -c 'tox -e performance'
- name: Generate 3 node Graphs
run: |
cd test/performance && sg lxd -c 'tox -e performance'
cd test/performance
sudo Rscript parse-performance-metrics.R -p ./results/head -o ./results/head -f *three-node.log
sudo Rscript parse-performance-metrics.R -p ./results/base-code -o ./results/base-code -f *three-node.log
sudo Rscript parse-performance-metrics.R -p ./results/v1-1-11 -o ./results/v1-1-11 -f *three-node.log
sudo Rscript parse-performance-metrics.R -p ./results/v1-2-0 -o ./results/v1-2-0 -f *three-node.log
- name: Generate single node Graphs
run: |
cd test/performance
mkdir -p ./results/single-node
cp ./results/head/*single-node.log ./results/single-node
cp ./results/base-code/*single-node.log ./results/single-node
cp ./results/v1-1-11/*single-node.log ./results/single-node
cp ./results/v1-2-0/*single-node.log ./results/single-node
sudo Rscript parse-performance-metrics.R -p ./results/single-node -o ./results/single-node -f *single-node.log
- name: Upload performance result
uses: actions/upload-artifact@v4
with:
name: performance-results
path: ${{ github.workspace }}/test/performance/results
- name: Prepare inspection reports
if: failure()
run: |
1 change: 1 addition & 0 deletions .gitignore
@@ -18,3 +18,4 @@ bin/
k8s-dqlite
hack/.build/
hack/.deps/
test/performance/results/*
31 changes: 31 additions & 0 deletions test/performance/Readme.md
@@ -47,3 +47,34 @@ export TEST_SKIP_CLEANUP=1 # (optionally) do not destroy m

cd test/performance && tox -e performance
```

## Parsing performance test metrics

The performance metrics are parsed, and the graphs generated, by an R script.

The script accepts three options that control how the graphs are generated:

- `-p` or `--path` - the directory containing the performance test result logs. Defaults to the current directory.
- `-o` or `--out` - the directory the graphs are written to. Defaults to the current directory.
- `-f` or `--filepattern` - the pattern of files to parse. Defaults to all files ending in `.log`.
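
The CI workflow sets two environment variables, picked up by `test_util/config.py`, that determine where those logs land: `TEST_METRICS_DIR` (default: `test/performance/results`) and `TEST_RUN_NAME` (default: `k8s`). A minimal sketch of a local run wired up the same way as the workflow's `head` run; that the run name appears in the resulting log file names is an assumption based on the workflow's copy patterns:

```bash
cd test/performance
export TEST_METRICS_DIR=$PWD/results/head  # where the harness writes the metric logs
export TEST_RUN_NAME=head                  # run label; assumed to appear in the log file names
mkdir -p ./results/head
tox -e performance
```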

The script generates the following graphs for all files matching the pattern in the given directory:

- `cpu_usage_plot` - CPU usage (`%CPU`) over time
- `mem_usage_plot` - memory usage (`%MEM`) over time
- `io_read_usage_plot` - disk read throughput (kB read per second)
- `io_write_usage_plot` - disk write throughput (kB written per second)

One-time setup to install R and the required packages:

```bash
sudo apt install r-base
sudo Rscript -e 'install.packages(c("ggplot2", "dplyr", "optparse"), repos="https://cloud.r-project.org")'
```

The script can be run with the following command:

```bash
cd test/performance
Rscript parse-performance-metrics.R
```
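
To parse only one flavour of run, pass the options explicitly, as the CI workflow does for the three-node logs (the pattern is quoted here so the shell passes it through to R unexpanded):

```bash
cd test/performance
Rscript parse-performance-metrics.R -p ./results/head -o ./results/head -f '*three-node.log'
```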
73 changes: 73 additions & 0 deletions test/performance/parse-performance-metrics.R
@@ -0,0 +1,73 @@
#!/usr/bin/env Rscript

# Load necessary libraries
library(ggplot2)
library(dplyr)
library(optparse)

# Define command-line options
option_list = list(
make_option(c("-o", "--out"), type = "character", default = ".",
help = "Output path for plots"),
make_option(c("-p", "--path"), type = "character", default = ".",
help = "Path to metrics files"),

make_option(c("-f", "--filepattern"), type = "character", default = "*.log",
help = "File pattern to match metrics files")
)

# Parse command-line options
opt_parser = OptionParser(option_list = option_list)
opt = parse_args(opt_parser)

metrics_files <- list.files(opt$path, pattern = opt$filepattern, full.names = TRUE)

print(paste("Found", length(metrics_files), "metrics files"))

# Parsed metrics data, keyed by file name
metrics_data <- list()

# Read and process each file
for (file in metrics_files) {
if (!file.exists(file)) {
warning("File not found: ", file)
next
}

# Read and clean data
df <- read.table(file, header = FALSE, skip = 3,
col.names = c("Time", "UID", "PID", "%usr", "%system", "%guest", "%wait", "%CPU",
"CPU", "minflt/s", "majflt/s", "VSZ", "RSS", "%MEM",
"kB_rd_s", "kB_wr_s", "kB_ccwr_s", "iodelay", "command"),
stringsAsFactors = FALSE)

# Convert numeric columns and handle NA values
df[2:(ncol(df) - 1)] <- lapply(df[2:(ncol(df) - 1)], function(x) as.numeric(as.character(x)))
df[is.na(df)] <- 0

metrics_data[[basename(file)]] <- df
}

# Verify data is loaded
if (length(metrics_data) < 1) {
stop("No valid data loaded. Check file paths or contents.", call. = FALSE)
}

# Define function to create and save plots
create_plot <- function(data_list, y_column, title, y_label, file_suffix) {
plot <- ggplot() +
lapply(names(data_list), function(node) {
geom_line(data = data_list[[node]], aes(x = 1:nrow(data_list[[node]]), y = !!sym(y_column), color = node))
}) +
labs(title = title, x = "Time", y = y_label)

# Save plot
ggsave(filename = file.path(opt$out, paste0(file_suffix, ".png")), plot = plot, width = 8, height = 6)
}

print("Creating plots")
# Generate and save all required plots
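# Note: read.table() sanitizes the column names, so "%CPU" and "%MEM" become "X.CPU" and "X.MEM"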
create_plot(metrics_data, "X.CPU", "CPU Usage Over Time", "% CPU", "cpu_usage_plot")
create_plot(metrics_data, "X.MEM", "Memory Usage Over Time", "% MEM", "mem_usage_plot")
create_plot(metrics_data, "kB_rd_s", "IO Read Usage Over Time", "kB_rd_s", "io_read_usage_plot")
create_plot(metrics_data, "kB_wr_s", "IO Write Usage Over Time", "kB_wr_s", "io_write_usage_plot")
5 changes: 1 addition & 4 deletions test/performance/tests/test_multi_node.py
@@ -1,14 +1,11 @@
#
# Copyright 2024 Canonical, Ltd.
#
import logging
from typing import List

import pytest
from test_util import harness, metrics, util

LOG = logging.getLogger(__name__)


@pytest.mark.node_count(3)
def test_three_node_load(instances: List[harness.Instance]):
@@ -36,4 +33,4 @@ def test_three_node_load(instances: List[harness.Instance]):
process_dict = metrics.collect_metrics(instances)
metrics.run_kube_burner(cluster_node)
metrics.stop_metrics(instances, process_dict)
metrics.pull_metrics(instances)
metrics.pull_metrics(instances, "three-node")
6 changes: 1 addition & 5 deletions test/performance/tests/test_single_node.py
@@ -1,17 +1,13 @@
#
# Copyright 2024 Canonical, Ltd.
#
import logging

from test_util import harness, metrics

LOG = logging.getLogger(__name__)


def test_single_node_load(session_instance: harness.Instance):
"""Test the performance of a single node cluster with all features enabled."""
metrics.configure_kube_burner(session_instance)
process_dict = metrics.collect_metrics([session_instance])
metrics.run_kube_burner(session_instance)
metrics.stop_metrics([session_instance], process_dict)
metrics.pull_metrics([session_instance])
metrics.pull_metrics([session_instance], "single-node")
10 changes: 4 additions & 6 deletions test/performance/tests/test_util/config.py
@@ -12,6 +12,10 @@

MANIFESTS_DIR = DIR / ".." / ".." / "templates"

METRICS_DIR = os.getenv("TEST_METRICS_DIR") or DIR / ".." / ".." / "results"

RUN_NAME = os.getenv("TEST_RUN_NAME") or "k8s"

# INSPECTION_REPORTS_DIR is the directory where inspection reports are stored.
# If empty, no reports are generated.
INSPECTION_REPORTS_DIR = os.getenv("TEST_INSPECTION_REPORTS_DIR")
@@ -42,11 +46,5 @@
# LXD_IMAGE is the image to use for LXD containers.
LXD_IMAGE = os.getenv("TEST_LXD_IMAGE") or "ubuntu:22.04"

# LXD_PROFILE is the profile to use for LXD containers.
LXD_PROFILE = (
os.getenv("TEST_LXD_PROFILE")
or (DIR / ".." / ".." / "lxd-profile.yaml").read_text()
)

# LXD_PROFILE_NAME is the profile name to use for LXD containers.
LXD_PROFILE_NAME = os.getenv("TEST_LXD_PROFILE_NAME") or "k8s-performance"
14 changes: 11 additions & 3 deletions test/performance/tests/test_util/harness/lxd.py
@@ -32,7 +32,7 @@ def __init__(self):
self.image = config.LXD_IMAGE
self.instances = set()

self._configure_profile(self.profile, config.LXD_PROFILE)
self._configure_profile(self.profile)

self._configure_network(
"lxdbr0",
@@ -74,7 +74,7 @@ def new_instance(self, network_type: str = "IPv4") -> Instance:
self.exec(instance_id, ["snap", "wait", "system", "seed.loaded"])
return Instance(self, instance_id)

def _configure_profile(self, profile_name: str, profile_config: str):
def _configure_profile(self, profile_name: str):
LOG.debug("Checking for LXD profile %s", profile_name)
try:
run(["lxc", "profile", "show", profile_name])
@@ -90,9 +90,17 @@ def _configure_profile(self, profile_name: str, profile_config: str):

try:
LOG.debug("Configuring LXD profile %s", profile_name)
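# Fetch the shared lxd-profile.yaml from the canonical/k8s-snap repository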
profile = run(
[
"curl",
"s",
"https://raw.githubusercontent.com/canonical/k8s-snap/refs/heads/main/tests/integration/lxd-profile.yaml",
],
capture_output=True,
).stdout
run(
["lxc", "profile", "edit", profile_name],
input=profile_config.encode(),
input=profile,
)
except subprocess.CalledProcessError as e:
raise HarnessError(f"Failed to configure LXD profile {profile_name}") from e