gofer: commit 50563e3

author Alberto Bertogli
2020-06-14 01:21:28 UTC
committer Alberto Bertogli
2020-06-14 01:42:39 UTC
parent 5c9742cac5b41827dd8ad6d841000a2da520a830

test: Add performance tests

This patch adds some performance tests, which run a simple stress test
of gofer and nginx using the "wrk" benchmarking tool, and graph the
results.

These are not rigorous benchmarks; they are added only to help identify
regressions and to get a rough idea of performance characteristics, so
they shouldn't be taken too seriously.
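
As a rough usage sketch (assuming wrk and nginx are installed, and that
the scripts are run from the test/ directory, since they reference paths
like perf/gofer.yaml and testdata/dir relative to it):

    # Run the stress tests against gofer and nginx, then graph.
    cd test
    ./perf/perf.sh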

.gitignore +3 -0
test/perf/analysis.sh +23 -0
test/perf/gofer.yaml +17 -0
test/perf/graph.py +62 -0
test/perf/nginx.conf +33 -0
test/perf/perf.sh +54 -0
test/perf/report.lua +24 -0
test/test.sh +2 -2
test/util/lib.sh +2 -2

diff --git a/.gitignore b/.gitignore
index aa87b41..c3a2ba8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,6 @@
 *.yaml
 *.pem
 !test/*.yaml
+
+# Files generated by the performance tests.
+test/testdata/dir/perf*
diff --git a/test/perf/analysis.sh b/test/perf/analysis.sh
new file mode 100755
index 0000000..a6dac9c
--- /dev/null
+++ b/test/perf/analysis.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+set -e
+
+. $(dirname ${0})/../util/lib.sh
+init
+
+# Merge all CSVs into one.
+echo -n "server,size,duration,requests," > .perf-out/all.csv
+echo -n "bytes,errors,reqps,byteps," >> .perf-out/all.csv
+echo -n "latmean,lat50,lat90,lat99,lat99.9," >> .perf-out/all.csv
+echo "lat99.99,lat99.999," >> .perf-out/all.csv
+for d in gofer nginx; do
+	for s in 1k 10k 100k 250k 500k 1M 10M; do
+		echo "$d,$s,`tail -n 1 .perf-out/$d-$s.csv`" \
+			>> .perf-out/all.csv
+	done
+done
+
+# Graph.
+python3 perf/graph.py
+
+echo "file://$PWD/.perf-out/results.html"
diff --git a/test/perf/gofer.yaml b/test/perf/gofer.yaml
new file mode 100644
index 0000000..40e6e64
--- /dev/null
+++ b/test/perf/gofer.yaml
@@ -0,0 +1,17 @@
+
+control_addr: "127.0.0.1:8459"
+
+http:
+  ":8450":
+    routes:
+      "/":
+        dir: "testdata/dir"
+
+    #reqlog:
+    #  "/": requests
+
+#reqlog:
+#  "requests":
+#    file: ".gofer-perf.requests.log"
+#    bufsize: 2048
+
diff --git a/test/perf/graph.py b/test/perf/graph.py
new file mode 100644
index 0000000..48fad3f
--- /dev/null
+++ b/test/perf/graph.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+# Dependencies: python3-pandas python3-plotly
+
+import pandas as pd
+import plotly.graph_objects as go
+from plotly.subplots import make_subplots
+import plotly.colors
+
+df = pd.read_csv(".perf-out/all.csv")
+
+fig = make_subplots(
+    rows=2, cols=2,
+    horizontal_spacing = 0.1,
+    vertical_spacing = 0.1,
+    subplot_titles=(
+        "Requests per second",
+        "Latency: 90%ile", "Latency: 99%ile", "Latency: 99.9%ile"),
+)
+
+fig.update_yaxes(row=1, col=1, rangemode="tozero")
+fig.update_yaxes(row=1, col=2, title_text="milliseconds",
+        rangemode="tozero")
+fig.update_yaxes(row=2, col=1, title_text="milliseconds",
+        rangemode="tozero")
+fig.update_yaxes(row=2, col=2, title_text="milliseconds",
+        rangemode="tozero")
+
+fig.update_layout(legend_orientation="h", hovermode="x")
+
+colors = plotly.colors.DEFAULT_PLOTLY_COLORS
+for i, s in enumerate(set(df.server.values)):
+    dfs = df[df.server == s]
+    color = colors[i]
+
+    fig.add_trace(
+            go.Scatter(
+                x=dfs["size"],
+                y=dfs.reqps,
+                mode='lines+markers',
+                line=dict(color=color),
+                showlegend=True,
+                name=s),
+            row=1, col=1)
+
+    for (row, col), k in [
+            ((1, 2), "lat90"),
+            ((2, 1), "lat99"),
+            ((2, 2), "lat99.9")]:
+        fig.add_trace(
+                go.Scatter(
+                    x=dfs["size"],
+                    y=dfs[k]/1000,  # convert us -> ms
+                    mode='lines+markers',
+                    line=dict(color=color),
+                    showlegend=False,
+                    name=s),
+                row=row, col=col)
+
+
+fig.write_html('.perf-out/results.html', auto_open=False)
+
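
The dependency comment at the top uses Debian package names; on a
Debian-style system something like the following should provide them (an
assumption, other distributions will differ):

    sudo apt install python3-pandas python3-plotly
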
diff --git a/test/perf/nginx.conf b/test/perf/nginx.conf
new file mode 100644
index 0000000..0579284
--- /dev/null
+++ b/test/perf/nginx.conf
@@ -0,0 +1,33 @@
+# Run with:
+#   nginx -c nginx.conf -p $REPO_ROOT
+
+error_log  .nginx.log info;
+daemon            off;
+worker_processes  2;
+pid .nginx.pid;
+
+events {
+	use           epoll;
+	worker_connections  128;
+}
+
+
+http {
+	server_tokens off;
+	include       /etc/nginx/mime.types;
+	charset       utf-8;
+
+	# access_log    .logs/access.log  combined;
+	access_log off;
+
+	server {
+		server_name   localhost;
+		listen        127.0.0.1:8077;
+
+		location      / {
+			root      testdata/dir;
+		}
+
+	}
+
+}
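
A quick sanity check that this instance is serving the test files (it
assumes nginx is already running and perf.sh has created
testdata/dir/perf-1k):

    # Expect "200" once the server is up.
    curl -s -o /dev/null -w '%{http_code}\n' http://127.0.0.1:8077/perf-1k
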
diff --git a/test/perf/perf.sh b/test/perf/perf.sh
new file mode 100755
index 0000000..31d30c9
--- /dev/null
+++ b/test/perf/perf.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+set -e
+
+. $(dirname ${0})/../util/lib.sh
+init
+
+function nginx_bg() {
+	nginx -c perf/nginx.conf -p $PWD &
+	PID=$!
+}
+
+export DURATION=${DURATION:-5s}
+
+function runwrk() {
+	wrk -d $DURATION -s perf/report.lua "$@"
+}
+
+echo "## Performance"
+
+echo "### Setup"
+
+gofer_bg -logfile=.perf.log -configfile=perf/gofer.yaml
+GOFER_PID=$PID
+wait_until_ready 8450
+
+nginx_bg
+NGINX_PID=$PID
+wait_until_ready 8077
+
+rm -rf .perf-out/
+mkdir -p .perf-out/
+
+snoop
+
+for s in 1k 10k 100k 250k 500k 1M 10M; do
+	echo "### Size: $s"
+	truncate -s $s testdata/dir/perf-$s
+
+	echo "#### gofer"
+	runwrk "http://localhost:8450/perf-$s"
+	mv wrkout.csv .perf-out/gofer-$s.csv
+	echo
+	snoop
+
+	echo "#### nginx"
+	runwrk "http://localhost:8077/perf-$s"
+	mv wrkout.csv .perf-out/nginx-$s.csv
+	echo
+	snoop
+done
+
+echo "### Analysis"
+perf/analysis.sh
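
The run length can be controlled through the DURATION environment
variable (default 5s); per-size results land in
.perf-out/<server>-<size>.csv, and analysis.sh writes the final graphs
to .perf-out/results.html. For example, for a longer run:

    cd test
    DURATION=30s ./perf/perf.sh
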
diff --git a/test/perf/report.lua b/test/perf/report.lua
new file mode 100644
index 0000000..144a824
--- /dev/null
+++ b/test/perf/report.lua
@@ -0,0 +1,24 @@
+-- wrk script used to get the results in a computer-friendly format.
+function done(summary, latency, requests)
+	local errors = summary.errors.connect + summary.errors.read +
+		summary.errors.write + summary.errors.status +
+		summary.errors.timeout
+
+	f = assert(io.open("wrkout.csv", "w+"))
+
+	f:write("duration,requests,bytes,errors,")
+	f:write("reqps,byteps,latmean,")
+	f:write("lat50,lat90,lat99,lat99.9,lat99.99,lat99.999\n")
+
+	f:write(string.format("%d,%d,%d,%d,",
+		summary.duration, summary.requests, summary.bytes, errors))
+	f:write(string.format("%f,%f,%f,",
+		summary.requests / (summary.duration/1000000.0),
+		summary.bytes / (summary.duration/1000000.0),
+		latency.mean))
+	for _, p in pairs({ 50, 90, 99, 99.9, 99.99, 99.999 }) do
+		n = latency:percentile(p)
+		f:write(string.format("%d,", n))
+	end
+	f:write("\n")
+end
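
wrk reports latencies in microseconds (graph.py divides by 1000 to plot
milliseconds). As a small sketch, the median latency of a single run can
be pulled out of the resulting CSV like this (lat50 is the 8th field):

    # NR==2 selects the data row; convert us -> ms.
    awk -F, 'NR==2 { printf "p50: %.2f ms\n", $8/1000 }' wrkout.csv
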
diff --git a/test/test.sh b/test/test.sh
index 7b35c42..c474a52 100755
--- a/test/test.sh
+++ b/test/test.sh
@@ -14,13 +14,13 @@ build
 rm -f .01-fe.requests.log .01-be.requests.log
 
 # Launch the backend serving static files and CGI.
-gofer_bg -logfile=.01-be.log -configfile=01-be.yaml
+gofer_bg -v=3 -logfile=.01-be.log -configfile=01-be.yaml
 BE_PID=$PID
 wait_until_ready 8450
 
 # Launch the test instance.
 generate_certs
-gofer_bg -logfile=.01-fe.log -configfile=01-fe.yaml
+gofer_bg -v=3 -logfile=.01-fe.log -configfile=01-fe.yaml
 FE_PID=$PID
 wait_until_ready 8441  # http
 wait_until_ready 8442  # https
diff --git a/test/util/lib.sh b/test/util/lib.sh
index 608ecb2..6ba248c 100644
--- a/test/util/lib.sh
+++ b/test/util/lib.sh
@@ -39,7 +39,7 @@ function set_cover() {
 
 function gofer() {
 	set_cover
-	../gofer $COVER_ARGS -v=3  "$@"  >> .out.log 2>&1
+	../gofer $COVER_ARGS  "$@"  >> .out.log 2>&1
 }
 
 # Run gofer in the background (sets $PID to its process id).
@@ -47,7 +47,7 @@ function gofer_bg() {
 	# Duplicate gofer() because if we put the function in the background,
 	# the pid will be of bash, not the subprocess.
 	set_cover
-	../gofer $COVER_ARGS -v=3  "$@"  >> .out.log 2>&1 &
+	../gofer $COVER_ARGS  "$@"  >> .out.log 2>&1 &
 	PID=$!
 }