Mirror of https://github.com/owncast/owncast.git, synced 2024-11-28 11:09:01 +03:00
WIP new load test suite
parent b691fe51fd
commit 4242d1731d
2 changed files with 125 additions and 0 deletions
38 test/load/new/README.md Normal file
@@ -0,0 +1,38 @@
# Owncast Load Testing

Load testing is an important tool for surfacing bugs and race conditions and for determining the overall server performance of Owncast. The primary goal is to test the server components, not the front end, since the performance of the browser and client components can vary. While the test aims to push a large amount of traffic through the backend, the frontend may not be able to handle it all; improving frontend performance is a separate goal from what these backend load tests are designed to do.

## What it will test

The test aims to reproduce the same requests and actions a normal user performs when joining an Owncast session, but at a rate faster than most normal environments. A rough k6 sketch of one such iteration follows the list below.


## This includes

1. Downloads the configuration.
1. Registers as a brand new chat user.
1. Fetches the chat history.
1. Connects to the chat websocket and sends messages.
1. Accesses the viewer count `ping` endpoint to count this virtual user as a viewer.
1. Fetches the current status.
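
As a rough sketch, one virtual-user iteration looks something like the following in k6. Only the `/api/ping`, `/api/chat/register`, and `/ws` paths come from the accompanying `test.js`; the config, status, and chat-history paths here are assumptions and may need adjusting to match your Owncast version.

```javascript
import http from 'k6/http';
import ws from 'k6/ws';
import { check } from 'k6';

const base = 'http://localhost:8080';

export default function () {
  // 1. Download the configuration (assumed path).
  http.get(`${base}/api/config`);

  // 2. Register as a brand new chat user and keep the access token.
  const reg = http.post(`${base}/api/chat/register`);
  check(reg, { 'registered chat user': (r) => r.status === 200 });
  const accessToken = reg.json('accessToken');

  // 3. Fetch the chat history (assumed path).
  http.get(`${base}/api/chat?accessToken=${accessToken}`);

  // 4. Connect to the chat websocket and send a single message.
  ws.connect(`ws://localhost:8080/ws?accessToken=${accessToken}`, null, (socket) => {
    socket.on('open', () => {
      socket.send(JSON.stringify({ type: 'CHAT', body: 'hello from the load test' }));
      socket.close();
    });
  });

  // 5. Hit the viewer `ping` endpoint so this virtual user counts as a viewer.
  http.get(`${base}/api/ping`);

  // 6. Fetch the current status (assumed path).
  http.get(`${base}/api/status`);
}
```

The full script in `test/load/new/test.js` additionally fakes per-user `User-Agent` headers and adds delays so each virtual user looks like a distinct, longer-lived viewer.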

## Set up your environment

1. Install [k6](https://k6.io/open-source) by following [the instructions for your local machine](https://k6.io/docs/getting-started/installation/).
1. Start Owncast on your local machine, listening on `localhost:8080` (an optional reachability check is sketched below).
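
Before kicking off a long run, it can help to confirm the local instance is actually reachable. Here is a small, optional k6 check assuming the default `localhost:8080` address; it is not part of the test suite itself and the filename is only an example.

```javascript
import http from 'k6/http';
import { check } from 'k6';

// One-off reachability check: save as e.g. smoke.js (hypothetical filename)
// and run with `k6 run smoke.js`.
export default function () {
  const res = http.get('http://localhost:8080/api/ping');
  check(res, { 'Owncast is reachable': (r) => r.status === 200 });
}
```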

## Run the tests

1. To monitor the concurrent chat users, open the admin chat users page at http://localhost:8080/admin/chat/users/.
1. To monitor the concurrent "viewers", open the admin viewers page at http://localhost:8080/admin/viewer-info/.
1. Begin the test suite by running `k6 run test.js`. A smaller example configuration for quick local runs is sketched below.
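
If you want a shorter run while iterating, one option is to temporarily swap the `options` export in `test.js` for a gentler ramp. The scenario name, durations, and VU targets below are example values only, not recommendations.

```javascript
// Example only: a smaller ramp for quick local iteration.
export let options = {
  userAgent: 'Owncast LoadTest/1.0',
  scenarios: {
    smoke: {
      executor: 'ramping-vus',
      startVUs: 0,
      gracefulStop: '5s',
      stages: [
        { duration: '10s', target: 5 },
        { duration: '30s', target: 25 },
      ],
      gracefulRampDown: '10s',
    },
  },
};
```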

## Troubleshooting

If you receive the error `ERRO[0080] dial tcp 127.0.0.1:8080: socket: too many open files`, it means your OS is not configured to allow enough concurrently open sockets for the level of testing the suite is trying to perform.

You can adjust this value using [ulimit](https://www.learnitguide.net/2015/07/how-to-increase-ulimit-values-in-linux.html).

Run `ulimit -n` to see your current limit. The default is likely `1024`, meaning each process can only have 1024 resources (files, sockets, and so on) open at the same time. Running `ulimit -Hn` shows the _hard limit_, so you can raise your limit to something between your current value and the hard limit, for example `ulimit -n 10000`.

As a side note, Owncast automatically raises its own limit when the service starts, making it less likely that the Owncast server itself will hit this limit.
87 test/load/new/test.js Normal file
@@ -0,0 +1,87 @@
import http from 'k6/http';
import ws from 'k6/ws';

import { check } from 'k6';

const baseUserAgent = 'Owncast LoadTest/1.0';

// Return a random integer between 1 and 10,000, used to make each
// virtual user's requests and chat messages look unique.
function randomNumber() {
  return Math.floor(Math.random() * 10000) + 1;
}

function pingViewerAPI() {
  // Fake the user agent so the server-side mapping of agent + IP
  // sees each request as a unique viewer.
  const params = {
    headers: {
      'User-Agent': 'test-client-' + randomNumber(),
    },
  };

  http.get('http://localhost:8080/api/ping', params);
}

function fetchHLSPlaylist() {
  // Request the HLS playlist the way a video player would.
  http.get('http://localhost:8080/hls/stream.m3u8');
}

function connectToChat() {
  // Register a brand new chat user and use the returned access token
  // to authenticate the websocket connection.
  const response = http.post('http://localhost:8080/api/chat/register');
  check(response, { 'status was 200': (r) => r.status == 200 });
  const accessToken = response.json('accessToken');

  const params = {
    headers: {
      'User-Agent': `${baseUserAgent} (iteration ${__ITER}; virtual-user ${__VU})`,
    },
  };

  ws.connect(
    `ws://127.0.0.1:8080/ws?accessToken=${accessToken}`,
    params,
    function (socket) {
      socket.on('open', function () {
        const testMessage = {
          body: `Test message ${randomNumber()}`,
          type: 'CHAT',
        };

        // After a user joins they wait 10 seconds to send a message.
        // socket.setTimeout keeps the websocket event loop running
        // while waiting, unlike a blocking sleep().
        socket.setTimeout(function () {
          socket.send(JSON.stringify(testMessage));
        }, 10 * 1000);

        // The user leaves a minute after sending the message.
        socket.setTimeout(function () {
          socket.close();
        }, 70 * 1000);
      });
    }
  );
}

// Each iteration simulates one viewer: ping the viewer-count API,
// fetch the HLS playlist, then join chat and send a message.
export default function () {
  pingViewerAPI();
  fetchHLSPlaylist();
  connectToChat();
}

export let options = {
  userAgent: baseUserAgent,
  scenarios: {
    // starting: {
    //   executor: 'shared-iterations',
    //   gracefulStop: '5s',
    //   vus: 10,
    //   iterations: 100,
    //   env: { SEND_MESSAGES: "true" },
    // },
    loadstages: {
      executor: 'ramping-vus',
      startVUs: 0,
      gracefulStop: '5s',
      stages: [
        { duration: '10s', target: 5 },
        { duration: '30s', target: 100 },
        { duration: '120s', target: 1000 },
        { duration: '300s', target: 5000 },
      ],
      gracefulRampDown: '10s',
      // Note: SEND_MESSAGES is not yet read anywhere in this script (WIP).
      env: { SEND_MESSAGES: 'false' },
    },
  },
};