Request host metrics all at once

2026-01-21 23:48:34 +01:00
parent 6b7d6d81bb
commit 1cd07427f0
5 changed files with 170 additions and 110 deletions

View File

@@ -1,14 +1,18 @@
<script lang="ts">
import { onMount } from "svelte";
import { onDestroy, onMount } from "svelte";
import HostMetricsGraph from "./HostMetricsGraph.svelte";
import { load_host_names } from "./HostMetricsLib";
import { get_host_metrics, load_host_names, type HostMetrics } from "./HostMetricsLib";
import Expandable from "util/Expandable.svelte";
import ToggleButton from "layout/ToggleButton.svelte";
import { formatDate } from "util/Formatting";
import { loading_finish, loading_start } from "lib/Loading";
const groups: {
title: string,
graphs: {
metric: string,
agg_base?: string,
agg_divisor?: string,
data_type: string,
}[],
}[] = [
@@ -17,6 +21,7 @@ const groups: {
graphs: [
{metric: "api_request", data_type: "number"},
{metric: "api_request_duration", data_type: "duration"},
{metric: "api_request_duration_avg", agg_base: "api_request_duration", agg_divisor: "api_request", data_type: "duration"},
{metric: "api_error", data_type: "number"},
{metric: "api_panic", data_type: "number"},
],
@@ -31,7 +36,9 @@ const groups: {
graphs: [
{metric: "database_query", data_type: "number"},
{metric: "database_query_duration", data_type: "duration"},
{metric: "database_query_duration_avg", agg_base: "database_query_duration", agg_divisor: "database_query", data_type: "duration"},
{metric: "database_query_rows", data_type: "number"},
{metric: "database_query_rows_avg", agg_base: "database_query_rows", agg_divisor: "database_query", data_type: "number"},
{metric: "database_query_retries", data_type: "number"},
{metric: "database_query_error", data_type: "number"},
],
@@ -56,6 +63,8 @@ const groups: {
{metric: "pixelstore_neighbour_read_size", data_type: "bytes"},
{metric: "pixelstore_reed_solomon_read", data_type: "number"},
{metric: "pixelstore_reed_solomon_read_size", data_type: "bytes"},
{metric: "pixelstore_read_retry_success", data_type: "number"},
{metric: "pixelstore_read_retry_error", data_type: "number"},
],
}, {
title: "Pixelstore shards",
@@ -67,23 +76,109 @@ const groups: {
{metric: "pixelstore_shard_move", data_type: "number"},
{metric: "pixelstore_shard_move_size", data_type: "bytes"},
],
}, {
title: "Pixelstore API",
graphs: [
{metric: "pixelstore_api_error_400", data_type: "number"},
{metric: "pixelstore_api_error_500", data_type: "number"},
{metric: "pixelstore_api_put_file", data_type: "number"},
{metric: "pixelstore_api_put_file_size", data_type: "bytes"},
{metric: "pixelstore_api_get_file", data_type: "number"},
{metric: "pixelstore_api_file_exists", data_type: "number"},
{metric: "pixelstore_api_status", data_type: "number"},
],
},
]
let dataWindow: number = $state(60)
let dataInterval: number = $state(1)
let showAggregate: boolean = $state(false)
let metrics: HostMetrics = $state({timestamps: [], metrics: {}})
let metrics_timeout: NodeJS.Timeout = null
const load_metrics = async (window: number, interval: number) => {
if (metrics_timeout !== null) { clearTimeout(metrics_timeout) }
metrics_timeout = setTimeout(() => { load_metrics(dataWindow, dataInterval) }, 10000)
let today = new Date()
let start = new Date()
start.setMinutes(start.getMinutes() - window)
let metrics_list: string[] = []
for (const group of groups) {
for (const graph of group.graphs) {
if (graph.metric !== undefined) {
metrics_list.push(graph.metric)
}
}
}
loading_start()
try {
metrics = await get_host_metrics(start, today, metrics_list, interval)
// Format the dates
metrics.timestamps.forEach((val: string, idx: number) => {
metrics.timestamps[idx] = formatDate(val, true, true, true)
});
} catch (error) {
alert(error)
} finally {
loading_finish()
}
// If the dataset uses the duration type, we need to convert the values
// to milliseconds
for (const group of groups) {
for (const graph of group.graphs) {
if (graph.data_type === "duration" && metrics.metrics[graph.metric] !== undefined) {
for (const host of Object.keys(metrics.metrics[graph.metric])) {
for (let i = 0; i < metrics.metrics[graph.metric][host].length; i++) {
// Go durations are expressed in nanoseconds, divide by
// 1 million to convert to milliseconds
metrics.metrics[graph.metric][host][i] /= 1000000
}
}
}
// If the graph is an aggregate, we need to derive a new dataset from its
// base datasets. Here we create the new dataset by dividing one dataset
// by another
if (graph.agg_base !== undefined && graph.agg_divisor !== undefined) {
for (const host of Object.keys(metrics.metrics[graph.agg_base])) {
metrics.metrics[graph.metric][host] = []
for (let i = 0; i < metrics.metrics[graph.agg_base][host].length; i++) {
if (metrics.metrics[graph.agg_divisor][host][i] > 0) {
metrics.metrics[graph.metric][host].push(
metrics.metrics[graph.agg_base][host][i] / metrics.metrics[graph.agg_divisor][host][i]
)
} else {
metrics.metrics[graph.metric][host].push(0)
}
}
}
}
}
}
}
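A minimal sketch of what the aggregate branch above produces, using invented numbers for a single host; the variable names and sample values are illustrative only and are not part of the commit:
// Sketch: a *_avg series is the per-bucket quotient of its agg_base and
// agg_divisor series, with buckets of zero requests mapped to 0.
const base: number[] = [120, 0, 90]     // e.g. database_query_duration, already in ms
const divisor: number[] = [4, 0, 3]     // e.g. database_query
const avg = base.map((v, i) => divisor[i] > 0 ? v / divisor[i] : 0)
// avg === [30, 0, 30]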
const setWindow = (window: number, interval: number) => {
dataWindow = window
dataInterval = interval
load_metrics(dataWindow, dataInterval);
}
let loaded = $state(false)
onMount(async () => {
await load_host_names()
await load_metrics(dataWindow, dataInterval);
loaded = true
})
onDestroy(() => {
if (metrics_timeout !== null) {
clearTimeout(metrics_timeout)
}
})
</script>
{#if loaded}
@@ -111,10 +206,10 @@ onMount(async () => {
<div class="grid">
{#each group.graphs as graph (graph.metric)}
<HostMetricsGraph
window={dataWindow}
interval={dataInterval}
metric={graph.metric}
data_type={graph.data_type}
timestamps={metrics.timestamps}
metrics={metrics.metrics[graph.metric]}
aggregate={showAggregate}
/>
{/each}

View File

@@ -1,65 +1,35 @@
<script lang="ts">
import { onMount } from "svelte";
import Chart from "util/Chart.svelte";
import { host_colour, host_label } from "./HostMetricsLib";
import { get_host_metrics, type HostMetrics } from "lib/AdminAPI";
import { formatDate } from "util/Formatting";
let {
metric = "",
window = 0, // Size of the data window in minutes
interval = 0, // Interval of the datapoints in minutes
data_type = "number",
data_type = "",
timestamps = [],
metrics = {},
aggregate = false,
}: {
metric: string;
window: number;
interval: number;
data_type?: string;
aggregate?: boolean;
data_type: string;
timestamps: string[];
metrics: {[key: string]: number[]};
aggregate: boolean;
} = $props();
// Make load_graph reactive
$effect(() => {load_graph(metric, window, interval, aggregate)})
let chart: Chart = $state()
let chartTimeout: NodeJS.Timeout = null
const load_graph = async (_metric: string, _window: number, _interval: number, _aggregate: boolean) => {
if (chartTimeout !== null) { clearTimeout(chartTimeout) }
chartTimeout = setTimeout(() => { load_graph(metric, window, interval, aggregate) }, 10000)
// Make update_chart reactive
$effect(() => {update_chart(timestamps, metrics, aggregate)})
let today = new Date()
let start = new Date()
start.setMinutes(start.getMinutes() - _window)
try {
const metrics = await get_host_metrics(start, today, _metric, _interval)
// Format the dates
metrics.timestamps.forEach((val: string, idx: number) => {
metrics.timestamps[idx] = formatDate(val, true, true, true)
});
chart.data().labels = metrics.timestamps;
// If the dataset uses the duration type, we need to convert the values
// to milliseconds
if (data_type === "duration") {
for (const host of Object.keys(metrics.host_amounts)) {
for (let i = 0; i < metrics.host_amounts[host].length; i++) {
// Go durations are expressed in nanoseconds, divide by 1
// million to convert to milliseconds
metrics.host_amounts[host][i] /= 1000000
}
}
}
const update_chart = async (timestamps: string[], metrics: {[key: string]: number[]}, aggregate: boolean) => {
chart.data().labels = [...timestamps];
// Truncate the datasets array in case we have more datasets cached than
// there are in the response
chart.data().datasets.length = Object.keys(metrics.host_amounts).length
chart.data().datasets.length = Object.keys(metrics).length
let i = 0
if (_aggregate) {
if (aggregate === true) {
i = 1
chart.data().datasets[0] = {
label: "aggregate",
@@ -71,7 +41,7 @@ const load_graph = async (_metric: string, _window: number, _interval: number, _
}
}
for (const host of Object.keys(metrics.host_amounts).sort()) {
for (const host of Object.keys(metrics).sort()) {
if (chart.data().datasets[i] === undefined) {
chart.data().datasets[i] = {
label: "",
@@ -83,39 +53,25 @@ const load_graph = async (_metric: string, _window: number, _interval: number, _
chart.data().datasets[i].label = await host_label(host)
chart.data().datasets[i].borderColor = host_colour(host)
chart.data().datasets[i].backgroundColor = host_colour(host)
chart.data().datasets[i].data = metrics.host_amounts[host]
chart.data().datasets[i].data = [...metrics[host]]
i++
}
chart.update()
} catch (error) {
alert(error)
}
}
const create_aggregate_dataset = (metrics: HostMetrics): number[] => {
const create_aggregate_dataset = (hosts: {[key:string]: number[]}): number[] => {
let data: number[] = []
for (const host of Object.keys(metrics.host_amounts)) {
for (let idx = 0; idx < metrics.host_amounts[host].length; idx++) {
for (const host of Object.keys(hosts)) {
for (let idx = 0; idx < hosts[host].length; idx++) {
if (data[idx]===undefined) {
data[idx] = 0
}
data[idx] += metrics.host_amounts[host][idx]
data[idx] += hosts[host][idx]
}
}
return data
}
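A brief usage sketch of create_aggregate_dataset with two invented hosts, showing that the aggregate line is the element-wise sum of all host series; the IDs and values are made up for the example:
// Sketch: invented host IDs and values, not taken from a real response.
const hosts = {
    "host_a": [1, 2, 3],
    "host_b": [4, 5, 6],
}
const aggregate = create_aggregate_dataset(hosts)
// aggregate === [5, 7, 9]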
onMount(() => {
load_graph(metric, window, interval, aggregate);
return () => {
if (chartTimeout !== null) {
clearTimeout(chartTimeout)
}
}
})
</script>
<div>

View File

@@ -1,8 +1,26 @@
import { loading_finish, loading_start } from "lib/Loading";
import { get_endpoint } from "lib/PixeldrainAPI";
import { check_response, get_endpoint } from "lib/PixeldrainAPI";
import hsl2rgb from "pure-color/convert/hsl2rgb";
import rgb2hex from "pure-color/convert/rgb2hex";
export type HostMetrics = {
timestamps: string[]
// First key is the requested metric, second key is the host ID
metrics: { [key: string]: { [key: string]: number[] } }
}
export const get_host_metrics = async (start: Date, end: Date, metrics: string[], interval: number): Promise<HostMetrics> => {
return await check_response(
await fetch(
get_endpoint() + "/admin/host_metrics" +
"?start=" + start.toISOString() +
"&end=" + end.toISOString() +
"&metrics=" + metrics.join(",") +
"&interval=" + interval
)
) as HostMetrics
};
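An illustration of the shape this function resolves to, assuming two metrics and two hosts; the host IDs, timestamps and values are invented for the example:
// Example shape only; real keys and values come from the API.
const example: HostMetrics = {
    timestamps: ["2026-01-21 22:00", "2026-01-21 22:01"],
    metrics: {
        api_request: { "host_1": [12, 15], "host_2": [9, 11] },
        api_error:   { "host_1": [0, 1],   "host_2": [0, 0] },
    },
}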
let host_colours: { [key: string]: string } = {}
export const host_colour = (id: string): string => {
let host_count: number = Object.keys(host_colours).length
@@ -12,11 +30,18 @@ export const host_colour = (id: string): string => {
return host_colours[id]
}
const colour_interval = 360 / (host_count + 1)
// Divide the colour wheel by the number of hosts we need to colour. The
// step is never smaller than 24 degrees of hue, which allows for 15
// distinct colours per cycle of the wheel
const colour_interval = Math.max(360 / (host_count + 1), 24)
var i = 0
for (const host of Object.keys(host_colours).sort()) {
host_colours[host] = rgb2hex(hsl2rgb([i * colour_interval, 100, 70]))
const hue = (i * colour_interval) % 360
// Lightness decreases by 10 for each cycle of the colour wheel
const lightness = 80 - (Math.floor((i * colour_interval) / 360) * 10)
host_colours[host] = rgb2hex(hsl2rgb([hue, 100, lightness]))
i++
}
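A worked example of the stepping above, assuming a hypothetical pool of 20 hosts: 360 / 21 ≈ 17.1 is below the 24-degree floor, so the interval becomes 24 and the sixteenth host wraps around the wheel at a lower lightness.
// Worked example only, for an assumed pool of 20 hosts.
const interval = Math.max(360 / (20 + 1), 24)                // 24
const hue_0 = (0 * interval) % 360                           // 0
const light_0 = 80 - Math.floor((0 * interval) / 360) * 10   // 80
const hue_15 = (15 * interval) % 360                         // 0 again, same hue as host 0
const light_15 = 80 - Math.floor((15 * interval) / 360) * 10 // 70, one lightness step darker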

View File

@@ -27,20 +27,3 @@ export const get_admin_invoices = async (year: number, month: number) => {
)
) as Invoice[]
};
export type HostMetrics = {
timestamps: string[]
host_amounts: { [key: string]: number[] }
}
export const get_host_metrics = async (start: Date, end: Date, metric: string, interval: number): Promise<HostMetrics> => {
return await check_response(
await fetch(
get_endpoint() + "/admin/host_metrics" +
"?start=" + start.toISOString() +
"&end=" + end.toISOString() +
"&metric=" + metric +
"&interval=" + interval
)
) as HostMetrics
};

View File

@@ -15,6 +15,7 @@ export type User = {
subscription: Subscription,
storage_space_used: number,
filesystem_storage_used: number,
filesystem_node_count: number,
is_admin: boolean,
balance_micro_eur: number,
hotlinking_enabled: boolean,
@@ -138,7 +139,7 @@ export const logout_user = async (redirect_path: string) => {
{ method: "DELETE" },
))
document.cookie = "pd_auth_key=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;";
// document.cookie = "pd_auth_key=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;";
window.location.pathname = redirect_path
}