Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions DEPENDENCIES.md
Original file line number Diff line number Diff line change
Expand Up @@ -84,9 +84,9 @@ base64: 0.22.1, "Apache-2.0 OR MIT",
base64ct: 1.8.3, "Apache-2.0 OR MIT",
bdd: 0.0.1, "Apache-2.0",
beef: 0.5.2, "Apache-2.0 OR MIT",
bench-dashboard-frontend: 0.5.0, "Apache-2.0",
bench-dashboard-frontend: 0.5.1-edge.1, "Apache-2.0",
bench-dashboard-shared: 0.1.0, "Apache-2.0",
bench-report: 0.2.2, "Apache-2.0",
bench-report: 0.2.3-edge.1, "Apache-2.0",
bench-runner: 0.1.0, "Apache-2.0",
bigdecimal: 0.4.10, "Apache-2.0 OR MIT",
bimap: 0.6.3, "Apache-2.0 OR MIT",
Expand Down Expand Up @@ -392,7 +392,7 @@ idna: 1.1.0, "Apache-2.0 OR MIT",
idna_adapter: 1.2.1, "Apache-2.0 OR MIT",
iggy: 0.9.0, "Apache-2.0",
iggy-bench: 0.4.0, "Apache-2.0",
iggy-bench-dashboard-server: 0.6.0, "Apache-2.0",
iggy-bench-dashboard-server: 0.6.1-edge.1, "Apache-2.0",
iggy-cli: 0.11.0, "Apache-2.0",
iggy-connectors: 0.3.0, "Apache-2.0",
iggy-mcp: 0.3.0, "Apache-2.0",
Expand Down
2 changes: 1 addition & 1 deletion core/bench/dashboard/frontend/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
[package]
name = "bench-dashboard-frontend"
license = "Apache-2.0"
version = "0.5.0"
version = "0.5.1-edge.1"
edition = "2024"

[package.metadata.cargo-machete]
Expand Down
2 changes: 1 addition & 1 deletion core/bench/dashboard/server/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
[package]
name = "iggy-bench-dashboard-server"
license = "Apache-2.0"
version = "0.6.0"
version = "0.6.1-edge.1"
edition = "2024"

[dependencies]
Expand Down
2 changes: 1 addition & 1 deletion core/bench/report/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

[package]
name = "bench-report"
version = "0.2.2"
version = "0.2.3-edge.1"
edition = "2024"
description = "Benchmark report and chart generation library for iggy-bench binary and iggy-benchmarks-dashboard web app"
license = "Apache-2.0"
Expand Down
16 changes: 10 additions & 6 deletions core/bench/report/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,15 +52,15 @@ pub fn create_throughput_chart(

chart = chart.add_dual_time_line_series(
&format!("{} {} [MB/s]", actor_type, metrics.summary.actor_id),
metrics.throughput_mb_ts.as_charming_points(),
metrics.throughput_mb_ts.as_downsampled_charming_points(),
None,
0.4,
0,
1.0,
);
chart = chart.add_dual_time_line_series(
&format!("{} {} [msg/s]", actor_type, metrics.summary.actor_id),
metrics.throughput_msg_ts.as_charming_points(),
metrics.throughput_msg_ts.as_downsampled_charming_points(),
None,
0.4,
1,
Expand All @@ -77,15 +77,19 @@ pub fn create_throughput_chart(

chart = chart.add_dual_time_line_series(
&format!("All {}s [MB/s]", metrics.summary.kind.actor()),
metrics.avg_throughput_mb_ts.as_charming_points(),
metrics
.avg_throughput_mb_ts
.as_downsampled_charming_points(),
None,
1.0,
0,
2.0,
);
chart = chart.add_dual_time_line_series(
&format!("All {}s [msg/s]", metrics.summary.kind.actor()),
metrics.avg_throughput_msg_ts.as_charming_points(),
metrics
.avg_throughput_msg_ts
.as_downsampled_charming_points(),
None,
1.0,
1,
Expand Down Expand Up @@ -117,7 +121,7 @@ pub fn create_latency_chart(

chart = chart.add_time_series(
&format!("{} {} [ms]", actor_type, metrics.summary.actor_id),
metrics.latency_ts.as_charming_points(),
metrics.latency_ts.as_downsampled_charming_points(),
None,
0.3,
);
Expand All @@ -131,7 +135,7 @@ pub fn create_latency_chart(

chart = chart.add_dual_time_line_series(
&format!("Avg {}s [ms]", metrics.summary.kind.actor()),
metrics.avg_latency_ts.as_charming_points(),
metrics.avg_latency_ts.as_downsampled_charming_points(),
None,
1.0,
0,
Expand Down
11 changes: 11 additions & 0 deletions core/bench/report/src/types/time_series.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,22 @@ pub enum TimeSeriesKind {
Latency,
}

/// Maximum number of points handed to the chart renderer; longer series are
/// downsampled so ECharts stays responsive.
const MAX_CHART_POINTS: usize = 3000;

impl TimeSeries {
    /// All raw points as `[time_s, value]` pairs for the charming/ECharts API.
    pub fn as_charming_points(&self) -> Vec<Vec<f64>> {
        let mut pairs = Vec::with_capacity(self.points.len());
        for point in &self.points {
            pairs.push(vec![point.time_s, point.value]);
        }
        pairs
    }

    /// LTTB-downsampled points for chart rendering.
    /// Caps output at `MAX_CHART_POINTS` to keep ECharts responsive.
    pub fn as_downsampled_charming_points(&self) -> Vec<Vec<f64>> {
        let reduced = crate::utils::lttb_downsample(&self.points, MAX_CHART_POINTS);
        reduced
            .iter()
            .map(|point| vec![point.time_s, point.value])
            .collect()
    }
}
140 changes: 139 additions & 1 deletion core/bench/report/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* under the License.
*/

use crate::time_series::TimeSeries;
use crate::time_series::{TimePoint, TimeSeries};
use serde::Serializer;

pub(crate) fn round_float<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
Expand Down Expand Up @@ -48,6 +48,66 @@ pub fn max(series: &TimeSeries) -> Option<f64> {
.max_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
}

/// LTTB (Largest-Triangle-Three-Buckets) downsampling.
///
/// Reduces `points` to at most `threshold` points while preserving visual shape
/// (peaks, valleys, trends). Returns a clone of the input unchanged when
/// `points.len() <= threshold`, or when `threshold < 3` (the algorithm always
/// keeps both endpoints, so fewer than 3 output points is meaningless).
pub fn lttb_downsample(points: &[TimePoint], threshold: usize) -> Vec<TimePoint> {
    let len = points.len();
    if len <= threshold || threshold < 3 {
        return points.to_vec();
    }

    // The first and last points are always kept; the interior (len - 2 points)
    // is partitioned into `threshold - 2` buckets of equal fractional width.
    let bucket_count = threshold - 2;
    let bucket_size = (len - 2) as f64 / bucket_count as f64;

    let mut result = Vec::with_capacity(threshold);
    result.push(points[0].clone());

    // Index of the point chosen for the previous bucket — the fixed left
    // vertex of each triangle below.
    let mut prev_selected = 0usize;

    for bucket_idx in 0..bucket_count {
        // Compute average of the *next* bucket (used as the third triangle vertex).
        // The `min(len - 1)` clamp means that for the final bucket the "next
        // bucket" collapses to just the last point, as in the reference algorithm.
        let next_start = ((bucket_idx + 1) as f64 * bucket_size) as usize + 1;
        let next_end = (((bucket_idx + 2) as f64 * bucket_size) as usize + 1).min(len - 1);

        let mut avg_time = 0.0;
        let mut avg_value = 0.0;
        let next_count = (next_end - next_start + 1) as f64;
        for p in &points[next_start..=next_end] {
            avg_time += p.time_s;
            avg_value += p.value;
        }
        avg_time /= next_count;
        avg_value /= next_count;

        // Current bucket range, half-open [cur_start, cur_end). Since
        // len > threshold implies bucket_size > 1, the range is never empty.
        let cur_start = (bucket_idx as f64 * bucket_size) as usize + 1;
        let cur_end = next_start;

        // Pick the point in this bucket that forms the largest triangle with
        // the previously selected point and the next-bucket average.
        let prev = &points[prev_selected];
        let mut max_area = -1.0; // sentinel: any real area (>= 0) beats it
        let mut best = cur_start;
        for (i, p) in points[cur_start..cur_end].iter().enumerate() {
            // Twice the triangle area via the 2-D cross product; the constant
            // factor 1/2 is irrelevant for an argmax comparison.
            let area = ((prev.time_s - avg_time) * (p.value - prev.value)
                - (prev.time_s - p.time_s) * (avg_value - prev.value))
                .abs();
            if area > max_area {
                max_area = area;
                best = cur_start + i;
            }
        }

        result.push(points[best].clone());
        prev_selected = best;
    }

    result.push(points[len - 1].clone());
    result
}

/// Calculate the standard deviation of values from a TimeSeries
///
/// Returns None if the TimeSeries has fewer than 2 points
Expand All @@ -73,3 +133,81 @@ pub fn std_dev(series: &TimeSeries) -> Option<f64> {

Some(variance.sqrt())
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Build a series where `time_s` is the index and `value` comes from `values`.
    fn points_from(values: impl IntoIterator<Item = f64>) -> Vec<TimePoint> {
        let mut pts = Vec::new();
        for (idx, val) in values.into_iter().enumerate() {
            pts.push(TimePoint::new(idx as f64, val));
        }
        pts
    }

    #[test]
    fn lttb_passthrough_when_below_threshold() {
        let input = points_from([1.0, 2.0, 3.0]);
        assert_eq!(lttb_downsample(&input, 5), input);
    }

    #[test]
    fn lttb_passthrough_when_equal_to_threshold() {
        let input = points_from([1.0, 2.0, 3.0, 4.0, 5.0]);
        assert_eq!(lttb_downsample(&input, 5), input);
    }

    #[test]
    fn lttb_reduces_count() {
        let input = points_from((0..10_000).map(|i| (i as f64).sin()));
        assert_eq!(lttb_downsample(&input, 100).len(), 100);
    }

    #[test]
    fn lttb_preserves_endpoints() {
        let input = points_from((0..1000).map(|i| i as f64 * 0.1));
        let reduced = lttb_downsample(&input, 50);
        assert_eq!(
            reduced.first().unwrap().time_s,
            input.first().unwrap().time_s
        );
        assert_eq!(reduced.last().unwrap().time_s, input.last().unwrap().time_s);
    }

    #[test]
    fn lttb_preserves_peaks() {
        // Triangle wave with clear peaks (value = 50) at indices 50, 150, 250, ...
        let input: Vec<TimePoint> = (0..500)
            .map(|i| {
                let rising = (i / 50) % 2 == 0;
                let v = if rising {
                    (i % 50) as f64
                } else {
                    (50 - i % 50) as f64
                };
                TimePoint::new(i as f64, v)
            })
            .collect();

        let reduced = lttb_downsample(&input, 100);
        let max_val = reduced
            .iter()
            .map(|p| p.value)
            .fold(f64::NEG_INFINITY, f64::max);
        // LTTB should retain the peaks (value = 50) in the downsampled output
        assert!(
            (max_val - 50.0).abs() < f64::EPSILON,
            "peak 50.0 not preserved, got max {max_val}"
        );
    }

    #[test]
    fn lttb_edge_cases() {
        assert!(lttb_downsample(&[], 10).is_empty());
        for series in [points_from([42.0]), points_from([1.0, 2.0])] {
            assert_eq!(lttb_downsample(&series, 10), series);
        }
    }
}
Loading