diff --git a/tools/k6/covers_cold.js b/tools/k6/covers_cold.js
new file mode 100644
index 000000000..d2b8c548e
--- /dev/null
+++ b/tools/k6/covers_cold.js
@@ -0,0 +1,57 @@
+import http from "k6/http";
+import { sleep } from "k6";
+
+/**
+ * Fetches thumbnails for up to 100 archives in parallel batches, ensuring the cache is cold.
+ *
+ * Note that it does not delete any existing thumbnails from thumbs/, so delete them manually
+ * beforehand if needed.
+ *
+ * k6 run tools/k6/covers_cold.js
+ */
+const ParallelThumbnails = 6;
+const MaxThumbnails = 100;
+const BaseUrl = "http://localhost:3000";
+
+export const options = {
+    vus: 1,
+    iterations: 1,
+};
+
+function arrayChunk(arr, size) {
+    return arr.reduce((acc, _, i) => {
+        if (i % size === 0) acc.push(arr.slice(i, i + size));
+        return acc;
+    }, []);
+}
+
+function awaitJobs(jobs) {
+    const urls = jobs.map((job) => `${BaseUrl}/api/minion/${job}`);
+
+    for (;;) {
+        const ret = http.batch(urls).filter((x) => x.json().state !== "finished");
+        if (ret.length === 0) {
+            break;
+        }
+        sleep(0.5);
+    }
+}
+
+export function setup() {
+    const resp = http.get(`${BaseUrl}/api/archives`).json();
+    return { archives: resp };
+}
+
+export default function (data) {
+    const { archives } = data;
+
+    http.del(`${BaseUrl}/api/tempfolder`);
+    for (const chunk of arrayChunk(archives.slice(0, MaxThumbnails), ParallelThumbnails)) {
+        const thumbnails = chunk.map((archive) => `${BaseUrl}/api/archives/${archive.arcid}/thumbnail?no_fallback=true`);
+
+        // If the thumbnail is not available yet, wait for creation
+        const resp = http.batch(thumbnails);
+        const jobs = resp.filter((x) => x.status === 202).map((x) => x.json().job);
+        awaitJobs(jobs);
+    }
+}
diff --git a/tools/k6/page_fetching.js b/tools/k6/page_fetching.js
new file mode 100644
index 000000000..4c41d8b64
--- /dev/null
+++ b/tools/k6/page_fetching.js
@@ -0,0 +1,41 @@
+import http from "k6/http";
+
+// Run with: k6 run tools/k6/page_fetching.js
+
+export const options = {
+    scenarios: {
+        cold_start: {
+            executor: "shared-iterations",
+            vus: 4,
+            iterations: 20,
+            startTime: "0s",
+        },
+        lukewarm_start: {
+            executor: "shared-iterations",
+            vus: 4,
+            iterations: 10,
+            startTime: "10s",
+        },
+    },
+};
+
+export function setup() {
+    http.del("http://localhost:3000/api/tempfolder");
+
+    const resp = http.get("http://localhost:3000/api/archives");
+
+    return { archives: resp.json() };
+}
+
+export default function (data) {
+    const { archives } = data;
+
+    const inx = Math.floor(Math.random() * archives.length);
+    const archive = archives[inx];
+
+    const files = http.get(`http://localhost:3000/api/archives/${archive.arcid}/files`).json();
+
+    for (const p of files.pages) {
+        http.get(`http://localhost:3000${p}`);
+    }
+}
diff --git a/tools/k6/single_archive_cold.js b/tools/k6/single_archive_cold.js
new file mode 100644
index 000000000..92ac34d00
--- /dev/null
+++ b/tools/k6/single_archive_cold.js
@@ -0,0 +1,39 @@
+import http from "k6/http";
+
+/**
+ * Fetches up to 10 pages from the first archive, ensuring the cache is cold.
+ *
+ * k6 run tools/k6/single_archive_cold.js
+ */
+
+export const options = {
+    vus: 1,
+    iterations: 4,
+};
+
+function fetchAllPages(archive) {
+    const files = http.get(`http://localhost:3000/api/archives/${archive.arcid}/files`).json();
+
+    let left = 10;
+    for (const p of files.pages) {
+        http.get(`http://localhost:3000${p}`);
+        left -= 1;
+        if (left === 0) {
+            break;
+        }
+    }
+}
+
+export function setup() {
+    const resp = http.get("http://localhost:3000/api/archives").json();
+    const archive = resp[0];
+
+    return { archive: archive };
+}
+
+export default function (data) {
+    const { archive } = data;
+
+    http.del("http://localhost:3000/api/tempfolder");
+    fetchAllPages(archive);
+}
diff --git a/tools/k6/single_archive_warm.js b/tools/k6/single_archive_warm.js
new file mode 100644
index 000000000..0897fe283
--- /dev/null
+++ b/tools/k6/single_archive_warm.js
@@ -0,0 +1,41 @@
+import http from "k6/http";
+
+/**
+ * Fetches up to 10 pages from the first archive, ensuring the cache is warm first.
+ *
+ * k6 run tools/k6/single_archive_warm.js
+ */
+
+export const options = {
+    vus: 10,
+    iterations: 50,
+};
+
+function fetchAllPages(archive) {
+    const files = http.get(`http://localhost:3000/api/archives/${archive.arcid}/files`).json();
+
+    let left = 10;
+    for (const p of files.pages) {
+        http.get(`http://localhost:3000${p}`);
+        left -= 1;
+        if (left === 0) {
+            break;
+        }
+    }
+}
+
+export function setup() {
+    http.del("http://localhost:3000/api/tempfolder");
+
+    const resp = http.get("http://localhost:3000/api/archives").json();
+    const archive = resp[0];
+    fetchAllPages(archive);
+
+    return { archive: archive };
+}
+
+export default function (data) {
+    const { archive } = data;
+
+    fetchAllPages(archive);
+}