Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions datafusion-vortex-partitioned/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# DataFusion + Vortex

Partitioned Vortex dataset, converted one-for-one from the 100 ClickBench Parquet files and queried with [`vortex-datafusion-cli`].

[`vortex-datafusion-cli`]: https://github.com/vortex-data/vortex-datafusion-cli

## Cookbook: Generate benchmark results

Follow the same EC2 setup used by [datafusion-partitioned](../datafusion-partitioned/README.md), then run:

```bash
cd ClickBench/datafusion-vortex-partitioned
bash benchmark.sh
```

The benchmark script builds `vortex-datafusion-cli`, downloads the partitioned Parquet files, converts each `partitioned/hits_N.parquet` file into exactly one `vortex/hits_N.vortex` file, and runs the query set.

`benchmark.sh` checks out `vortex-datafusion-cli` tag `0.70.0-53.1.0`. CLI tags use `<vortex-version>-<df-version>`, where the first component is the `vortex-datafusion` crate version and the second is the DataFusion/DataFusion CLI version.

You can update/preview the results by running:

```bash
./make-json.sh <machine-name> # Example: ./make-json.sh c6a.4xlarge
```

## Parquet to Vortex conversion

Each input file is converted independently through `vortex-datafusion-cli`:

```sql
CREATE EXTERNAL TABLE hits_parquet
STORED AS PARQUET
LOCATION 'partitioned/hits_0.parquet'
OPTIONS ('binary_as_string' 'true');

COPY (
SELECT * EXCEPT ("EventDate"),
CAST(CAST("EventDate" AS INTEGER) AS DATE) AS "EventDate"
FROM hits_parquet
) TO 'vortex/hits_0.vortex' STORED AS VORTEX;
```

`binary_as_string=true` corrects the dataset's wrong Parquet logical type annotation (binary columns that actually hold UTF-8 strings) before the Vortex output is written. The produced Vortex files therefore store those fields as strings, so the benchmark reads need only the plain Vortex table registration, with no per-read option.
72 changes: 72 additions & 0 deletions datafusion-vortex-partitioned/benchmark.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
#!/bin/bash

# End-to-end ClickBench driver: build vortex-datafusion-cli, download the 100
# partitioned Parquet files, convert each into one Vortex file, and run the
# query set. Expects convert.sh and run.sh in the current directory.

set -Eeuo pipefail

# Some provisioning environments start without HOME set; rustup requires it.
export HOME=${HOME:=~}
# <vortex-version>-<df-version>
CLI_TAG=0.70.0-53.1.0
WITH_SWAP=false

# On hosts with less than 12 GiB of RAM, add temporary 8G swap so the Rust
# build and the parallel conversion are not OOM-killed. WITH_SWAP records
# whether this script created the swapfile so it can be removed at the end.
if [ "$(free -g | awk '/^Mem:/{print $2}')" -lt 12 ]; then
echo "LOW MEMORY MODE"
if [ "$(swapon --noheadings --show | wc -l)" -eq 0 ]; then
echo "Enabling 8G swap"
sudo fallocate -l 8G /swapfile
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile
WITH_SWAP=true
fi
fi

echo "Install Rust"
# Install rustup only if cargo is not already available; always source the
# cargo env so a pre-existing install is picked up too.
if ! command -v cargo >/dev/null 2>&1; then
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rust-init.sh
bash rust-init.sh -y
fi
source "$HOME/.cargo/env"

echo "Install dependencies"
sudo apt-get update -y
sudo apt-get install -y build-essential cmake pkg-config time

echo "Install vortex-datafusion-cli"
# Fresh clone every run, pinned to CLI_TAG (see tag naming note above).
rm -rf vortex-datafusion-cli
git clone https://github.com/vortex-data/vortex-datafusion-cli.git
cd vortex-datafusion-cli
git checkout "$CLI_TAG"
# LTO + a single codegen unit for a fully optimized release binary.
CARGO_PROFILE_RELEASE_LTO=true RUSTFLAGS="-C codegen-units=1" cargo build --release --bin vortex-datafusion-cli
export PATH="$(pwd)/target/release:$PATH"
cd ..

echo "Download benchmark target data, partitioned"
mkdir -p partitioned
# 100 parallel downloads; --continue lets a re-run resume or skip files that
# were already fetched.
seq 0 99 | xargs -P100 -I{} bash -c 'wget --directory-prefix partitioned --continue --progress=dot:giga https://datasets.clickhouse.com/hits_compatible/athena_partitioned/hits_{}.parquet'

echo "Convert Parquet to Vortex"
rm -rf vortex
mkdir -p vortex
# Time the full conversion (one convert.sh invocation per file, fanned out
# nproc-wide); the elapsed seconds land in load-time.txt, which make-json.sh
# later reads. Conversion output is captured and only shown on failure.
if ! /usr/bin/time -f '%e' -o load-time.txt bash -c '
set -Eeuo pipefail
seq 0 99 | xargs -P"$(nproc)" -I{} ./convert.sh "partitioned/hits_{}.parquet" "vortex/hits_{}.vortex"
' > convert.log 2>&1; then
cat convert.log
exit 1
fi
# Sanity check: exactly one Vortex file per input partition.
VORTEX_FILES=$(find vortex -maxdepth 1 -name 'hits_*.vortex' | wc -l)
if [ "$VORTEX_FILES" -ne 100 ]; then
echo "Expected 100 Vortex files, found $VORTEX_FILES" >&2
exit 1
fi
echo "Load time: $(cat load-time.txt)"

echo "Run benchmarks for partitioned"
./run.sh

echo "Data size: $(du -bcs vortex/*.vortex | grep total)"

# Remove the temporary swap only if this script enabled it above.
if [ "$WITH_SWAP" = true ]; then
echo "Disable swap"
sudo swapoff /swapfile
sudo rm /swapfile
fi
21 changes: 21 additions & 0 deletions datafusion-vortex-partitioned/convert.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
#!/bin/bash

# Convert one partitioned ClickBench Parquet file into a single Vortex file
# via vortex-datafusion-cli, recasting the EventDate integer column to DATE.

set -Eeuo pipefail

if [[ "$#" -ne 2 ]]; then
  echo "Usage: $0 <input.parquet> <output.vortex>" >&2
  exit 2
fi

src=$1
dst=$2

# Make sure the destination directory exists and start from a clean slate so
# a re-run never appends to or keeps a stale output file.
mkdir -p "$(dirname "$dst")"
rm -f "$dst"

# target_partitions=1 forces a single output file per input file;
# binary_as_string corrects the dataset's Parquet logical annotation.
set_stmt="SET datafusion.execution.target_partitions = 1;"
create_stmt="CREATE EXTERNAL TABLE hits_parquet STORED AS PARQUET LOCATION '$src' OPTIONS ('binary_as_string' 'true');"
copy_stmt="COPY (SELECT * EXCEPT (\"EventDate\"), CAST(CAST(\"EventDate\" AS INTEGER) AS DATE) AS \"EventDate\" FROM hits_parquet) TO '$dst' STORED AS VORTEX;"

vortex-datafusion-cli -q \
  -c "$set_stmt" \
  -c "$create_stmt" \
  -c "$copy_stmt"

# Fail loudly if the CLI exited 0 without actually producing the file.
test -f "$dst"
3 changes: 3 additions & 0 deletions datafusion-vortex-partitioned/create.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Register the directory of partitioned Vortex files (vortex/hits_N.vortex,
-- produced by benchmark.sh via convert.sh) as the `hits` table queried by
-- queries.sql.
CREATE EXTERNAL TABLE hits
STORED AS VORTEX
LOCATION 'vortex';
41 changes: 41 additions & 0 deletions datafusion-vortex-partitioned/make-json.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
#!/bin/bash

# This script converts the raw `result.csv` data from `benchmark.sh` into the
# final json format used by the benchmark dashboard.
#
# usage : ./make-json.sh <machine>
#
# example (save results/c6a.4xlarge.json)
# ./make-json.sh c6a.4xlarge

# Refuse to run without a machine name; otherwise we would silently write
# an unusable `results/.json`.
if [ -z "${1:-}" ]; then
echo "usage: $0 <machine>" >&2
exit 1
fi

MACHINE=$1
OUTPUT_FILE="results/${MACHINE}.json"
SYSTEM_NAME="DataFusion (Vortex, partitioned)"
DATE=$(date +%Y-%m-%d)
# LOAD_TIME / DATA_SIZE may be overridden from the environment; otherwise
# they come from the artifacts benchmark.sh leaves behind, and finally fall
# back to JSON null when neither is available.
LOAD_TIME=${LOAD_TIME:-$(cat load-time.txt 2>/dev/null || echo null)}
DATA_SIZE=${DATA_SIZE:-$(du -bcs vortex/*.vortex 2>/dev/null | awk '/total/ { print $1 }')}
DATA_SIZE=${DATA_SIZE:-null}

mkdir -p results

# Read the CSV and build the result array using awk: rows are grouped by the
# first field and emitted as one "[t1,t2,...]" JSON array per group, joined
# with ",\n". Assumes result.csv lines look like <query>,<run>,<seconds>
# (field 2 is unused) — TODO confirm against run.sh.
RESULT_ARRAY=$(awk -F, '{arr[$1]=arr[$1]","$3} END {for (i=1;i<=length(arr);i++) {gsub(/^,/, "", arr[i]); printf " ["arr[i]"]"; if (i<length(arr)) printf ",\n"}}' result.csv)

# form the final JSON structure from the template
cat <<EOF > "$OUTPUT_FILE"
{
"system": "$SYSTEM_NAME",
"date": "$DATE",
"machine": "$MACHINE",
"cluster_size": 1,
"proprietary": "no",
"tuned": "no",
"hardware": "cpu",
"tags": ["Rust","column-oriented","embedded","stateless"],
"load_time": $LOAD_TIME,
"data_size": $DATA_SIZE,
"result": [
$RESULT_ARRAY
]
}
EOF
43 changes: 43 additions & 0 deletions datafusion-vortex-partitioned/queries.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
SELECT COUNT(*) FROM hits;
SELECT COUNT(*) FROM hits WHERE "AdvEngineID" <> 0;
SELECT SUM("AdvEngineID"), COUNT(*), AVG("ResolutionWidth") FROM hits;
SELECT AVG("UserID") FROM hits;
SELECT COUNT(DISTINCT "UserID") FROM hits;
SELECT COUNT(DISTINCT "SearchPhrase") FROM hits;
SELECT MIN("EventDate"), MAX("EventDate") FROM hits;
SELECT "AdvEngineID", COUNT(*) FROM hits WHERE "AdvEngineID" <> 0 GROUP BY "AdvEngineID" ORDER BY COUNT(*) DESC;
SELECT "RegionID", COUNT(DISTINCT "UserID") AS u FROM hits GROUP BY "RegionID" ORDER BY u DESC LIMIT 10;
SELECT "RegionID", SUM("AdvEngineID"), COUNT(*) AS c, AVG("ResolutionWidth"), COUNT(DISTINCT "UserID") FROM hits GROUP BY "RegionID" ORDER BY c DESC LIMIT 10;
SELECT "MobilePhoneModel", COUNT(DISTINCT "UserID") AS u FROM hits WHERE "MobilePhoneModel" <> '' GROUP BY "MobilePhoneModel" ORDER BY u DESC LIMIT 10;
SELECT "MobilePhone", "MobilePhoneModel", COUNT(DISTINCT "UserID") AS u FROM hits WHERE "MobilePhoneModel" <> '' GROUP BY "MobilePhone", "MobilePhoneModel" ORDER BY u DESC LIMIT 10;
SELECT "SearchPhrase", COUNT(*) AS c FROM hits WHERE "SearchPhrase" <> '' GROUP BY "SearchPhrase" ORDER BY c DESC LIMIT 10;
SELECT "SearchPhrase", COUNT(DISTINCT "UserID") AS u FROM hits WHERE "SearchPhrase" <> '' GROUP BY "SearchPhrase" ORDER BY u DESC LIMIT 10;
SELECT "SearchEngineID", "SearchPhrase", COUNT(*) AS c FROM hits WHERE "SearchPhrase" <> '' GROUP BY "SearchEngineID", "SearchPhrase" ORDER BY c DESC LIMIT 10;
SELECT "UserID", COUNT(*) FROM hits GROUP BY "UserID" ORDER BY COUNT(*) DESC LIMIT 10;
SELECT "UserID", "SearchPhrase", COUNT(*) FROM hits GROUP BY "UserID", "SearchPhrase" ORDER BY COUNT(*) DESC LIMIT 10;
SELECT "UserID", "SearchPhrase", COUNT(*) FROM hits GROUP BY "UserID", "SearchPhrase" LIMIT 10;
SELECT "UserID", extract(minute FROM to_timestamp_seconds("EventTime")) AS m, "SearchPhrase", COUNT(*) FROM hits GROUP BY "UserID", m, "SearchPhrase" ORDER BY COUNT(*) DESC LIMIT 10;
SELECT "UserID" FROM hits WHERE "UserID" = 435090932899640449;
SELECT COUNT(*) FROM hits WHERE "URL" LIKE '%google%';
SELECT "SearchPhrase", MIN("URL"), COUNT(*) AS c FROM hits WHERE "URL" LIKE '%google%' AND "SearchPhrase" <> '' GROUP BY "SearchPhrase" ORDER BY c DESC LIMIT 10;
SELECT "SearchPhrase", MIN("URL"), MIN("Title"), COUNT(*) AS c, COUNT(DISTINCT "UserID") FROM hits WHERE "Title" LIKE '%Google%' AND "URL" NOT LIKE '%.google.%' AND "SearchPhrase" <> '' GROUP BY "SearchPhrase" ORDER BY c DESC LIMIT 10;
SELECT * FROM hits WHERE "URL" LIKE '%google%' ORDER BY "EventTime" LIMIT 10;
SELECT "SearchPhrase" FROM hits WHERE "SearchPhrase" <> '' ORDER BY "EventTime" LIMIT 10;
SELECT "SearchPhrase" FROM hits WHERE "SearchPhrase" <> '' ORDER BY "SearchPhrase" LIMIT 10;
SELECT "SearchPhrase" FROM hits WHERE "SearchPhrase" <> '' ORDER BY "EventTime", "SearchPhrase" LIMIT 10;
SELECT "CounterID", AVG(length("URL")) AS l, COUNT(*) AS c FROM hits WHERE "URL" <> '' GROUP BY "CounterID" HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25;
SELECT REGEXP_REPLACE("Referer", '^https?://(?:www\.)?([^/]+)/.*$', '\1') AS k, AVG(length("Referer")) AS l, COUNT(*) AS c, MIN("Referer") FROM hits WHERE "Referer" <> '' GROUP BY k HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25;
SELECT SUM("ResolutionWidth"), SUM("ResolutionWidth" + 1), SUM("ResolutionWidth" + 2), SUM("ResolutionWidth" + 3), SUM("ResolutionWidth" + 4), SUM("ResolutionWidth" + 5), SUM("ResolutionWidth" + 6), SUM("ResolutionWidth" + 7), SUM("ResolutionWidth" + 8), SUM("ResolutionWidth" + 9), SUM("ResolutionWidth" + 10), SUM("ResolutionWidth" + 11), SUM("ResolutionWidth" + 12), SUM("ResolutionWidth" + 13), SUM("ResolutionWidth" + 14), SUM("ResolutionWidth" + 15), SUM("ResolutionWidth" + 16), SUM("ResolutionWidth" + 17), SUM("ResolutionWidth" + 18), SUM("ResolutionWidth" + 19), SUM("ResolutionWidth" + 20), SUM("ResolutionWidth" + 21), SUM("ResolutionWidth" + 22), SUM("ResolutionWidth" + 23), SUM("ResolutionWidth" + 24), SUM("ResolutionWidth" + 25), SUM("ResolutionWidth" + 26), SUM("ResolutionWidth" + 27), SUM("ResolutionWidth" + 28), SUM("ResolutionWidth" + 29), SUM("ResolutionWidth" + 30), SUM("ResolutionWidth" + 31), SUM("ResolutionWidth" + 32), SUM("ResolutionWidth" + 33), SUM("ResolutionWidth" + 34), SUM("ResolutionWidth" + 35), SUM("ResolutionWidth" + 36), SUM("ResolutionWidth" + 37), SUM("ResolutionWidth" + 38), SUM("ResolutionWidth" + 39), SUM("ResolutionWidth" + 40), SUM("ResolutionWidth" + 41), SUM("ResolutionWidth" + 42), SUM("ResolutionWidth" + 43), SUM("ResolutionWidth" + 44), SUM("ResolutionWidth" + 45), SUM("ResolutionWidth" + 46), SUM("ResolutionWidth" + 47), SUM("ResolutionWidth" + 48), SUM("ResolutionWidth" + 49), SUM("ResolutionWidth" + 50), SUM("ResolutionWidth" + 51), SUM("ResolutionWidth" + 52), SUM("ResolutionWidth" + 53), SUM("ResolutionWidth" + 54), SUM("ResolutionWidth" + 55), SUM("ResolutionWidth" + 56), SUM("ResolutionWidth" + 57), SUM("ResolutionWidth" + 58), SUM("ResolutionWidth" + 59), SUM("ResolutionWidth" + 60), SUM("ResolutionWidth" + 61), SUM("ResolutionWidth" + 62), SUM("ResolutionWidth" + 63), SUM("ResolutionWidth" + 64), SUM("ResolutionWidth" + 65), SUM("ResolutionWidth" + 66), SUM("ResolutionWidth" + 67), SUM("ResolutionWidth" + 68), SUM("ResolutionWidth" + 69), SUM("ResolutionWidth" + 70), SUM("ResolutionWidth" + 71), SUM("ResolutionWidth" + 72), SUM("ResolutionWidth" + 73), SUM("ResolutionWidth" + 74), SUM("ResolutionWidth" + 75), SUM("ResolutionWidth" + 76), SUM("ResolutionWidth" + 77), SUM("ResolutionWidth" + 78), SUM("ResolutionWidth" + 79), SUM("ResolutionWidth" + 80), SUM("ResolutionWidth" + 81), SUM("ResolutionWidth" + 82), SUM("ResolutionWidth" + 83), SUM("ResolutionWidth" + 84), SUM("ResolutionWidth" + 85), SUM("ResolutionWidth" + 86), SUM("ResolutionWidth" + 87), SUM("ResolutionWidth" + 88), SUM("ResolutionWidth" + 89) FROM hits;
SELECT "SearchEngineID", "ClientIP", COUNT(*) AS c, SUM("IsRefresh"), AVG("ResolutionWidth") FROM hits WHERE "SearchPhrase" <> '' GROUP BY "SearchEngineID", "ClientIP" ORDER BY c DESC LIMIT 10;
SELECT "WatchID", "ClientIP", COUNT(*) AS c, SUM("IsRefresh"), AVG("ResolutionWidth") FROM hits WHERE "SearchPhrase" <> '' GROUP BY "WatchID", "ClientIP" ORDER BY c DESC LIMIT 10;
SELECT "WatchID", "ClientIP", COUNT(*) AS c, SUM("IsRefresh"), AVG("ResolutionWidth") FROM hits GROUP BY "WatchID", "ClientIP" ORDER BY c DESC LIMIT 10;
SELECT "URL", COUNT(*) AS c FROM hits GROUP BY "URL" ORDER BY c DESC LIMIT 10;
SELECT 1, "URL", COUNT(*) AS c FROM hits GROUP BY 1, "URL" ORDER BY c DESC LIMIT 10;
SELECT "ClientIP", "ClientIP" - 1, "ClientIP" - 2, "ClientIP" - 3, COUNT(*) AS c FROM hits GROUP BY "ClientIP", "ClientIP" - 1, "ClientIP" - 2, "ClientIP" - 3 ORDER BY c DESC LIMIT 10;
SELECT "URL", COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "DontCountHits" = 0 AND "IsRefresh" = 0 AND "URL" <> '' GROUP BY "URL" ORDER BY PageViews DESC LIMIT 10;
SELECT "Title", COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "DontCountHits" = 0 AND "IsRefresh" = 0 AND "Title" <> '' GROUP BY "Title" ORDER BY PageViews DESC LIMIT 10;
SELECT "URL", COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "IsRefresh" = 0 AND "IsLink" <> 0 AND "IsDownload" = 0 GROUP BY "URL" ORDER BY PageViews DESC LIMIT 10 OFFSET 1000;
SELECT "TraficSourceID", "SearchEngineID", "AdvEngineID", CASE WHEN ("SearchEngineID" = 0 AND "AdvEngineID" = 0) THEN "Referer" ELSE '' END AS Src, "URL" AS Dst, COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "IsRefresh" = 0 GROUP BY "TraficSourceID", "SearchEngineID", "AdvEngineID", Src, Dst ORDER BY PageViews DESC LIMIT 10 OFFSET 1000;
SELECT "URLHash", "EventDate", COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "IsRefresh" = 0 AND "TraficSourceID" IN (-1, 6) AND "RefererHash" = 3594120000172545465 GROUP BY "URLHash", "EventDate" ORDER BY PageViews DESC LIMIT 10 OFFSET 100;
SELECT "WindowClientWidth", "WindowClientHeight", COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-01' AND "EventDate" <= '2013-07-31' AND "IsRefresh" = 0 AND "DontCountHits" = 0 AND "URLHash" = 2868770270353813622 GROUP BY "WindowClientWidth", "WindowClientHeight" ORDER BY PageViews DESC LIMIT 10 OFFSET 10000;
SELECT DATE_TRUNC('minute', to_timestamp_seconds("EventTime")) AS M, COUNT(*) AS PageViews FROM hits WHERE "CounterID" = 62 AND "EventDate" >= '2013-07-14' AND "EventDate" <= '2013-07-15' AND "IsRefresh" = 0 AND "DontCountHits" = 0 GROUP BY DATE_TRUNC('minute', to_timestamp_seconds("EventTime")) ORDER BY DATE_TRUNC('minute', M) LIMIT 10 OFFSET 1000;
57 changes: 57 additions & 0 deletions datafusion-vortex-partitioned/results/c6a.2xlarge.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
{
"system": "DataFusion (Vortex, partitioned)",
"date": "2026-05-05",
"machine": "c6a.2xlarge",
"cluster_size": 1,
"proprietary": "no",
"tuned": "no",
"hardware": "cpu",
"tags": ["Rust","column-oriented","embedded","stateless"],
"load_time": 108.58,
"data_size": 15328662856,
"result": [
[0.078,0.002,0.002],
[0.170,0.027,0.028],
[0.214,0.072,0.069],
[0.650,0.071,0.070],
[1.367,0.840,0.837],
[1.355,0.785,0.786],
[0.075,0.002,0.002],
[0.179,0.031,0.033],
[1.269,1.045,1.044],
[1.670,1.251,1.237],
[0.770,0.162,0.156],
[1.099,0.192,0.191],
[1.603,0.694,0.682],
[3.280,1.199,1.198],
[1.395,0.667,0.670],
[1.108,0.948,0.942],
[3.158,1.772,1.779],
[3.122,1.778,1.785],
[5.120,3.531,3.507],
[0.313,0.043,0.048],
[15.675,0.906,0.901],
[17.831,0.929,0.926],
[22.767,1.105,1.103],
[22.582,1.521,1.574],
[0.319,0.079,0.075],
[1.603,0.146,0.147],
[0.609,0.081,0.082],
[16.343,1.328,1.365],
[15.637,15.200,15.211],
[0.814,0.656,0.668],
[2.796,0.592,0.595],
[5.885,0.641,0.629],
[3.929,3.007,3.019],
[16.025,3.545,3.512],
[15.999,3.567,3.503],
[1.455,1.293,1.298],
[0.254,0.074,0.073],
[0.203,0.034,0.034],
[0.243,0.024,0.022],
[0.386,0.130,0.129],
[0.247,0.019,0.016],
[0.249,0.015,0.015],
[0.242,0.014,0.015]
]
}
57 changes: 57 additions & 0 deletions datafusion-vortex-partitioned/results/c6a.4xlarge.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
{
"system": "DataFusion (Vortex, partitioned)",
"date": "2026-05-05",
"machine": "c6a.4xlarge",
"cluster_size": 1,
"proprietary": "no",
"hardware": "cpu",
"tuned": "no",
"tags": ["Rust","column-oriented","embedded","stateless"],
"load_time": 99.94,
"data_size": 15328662856,
"result": [
[0.082, 0.002, 0.002],
[0.152, 0.030, 0.027],
[0.175, 0.059, 0.060],
[0.633, 0.087, 0.089],
[1.306, 0.628, 0.626],
[1.316, 0.603, 0.592],
[0.090, 0.002, 0.002],
[0.166, 0.031, 0.030],
[1.223, 0.786, 0.772],
[1.664, 0.861, 0.871],
[0.729, 0.131, 0.133],
[1.116, 0.148, 0.147],
[1.605, 0.581, 0.578],
[3.174, 1.070, 1.068],
[1.527, 0.610, 0.597],
[0.887, 0.727, 0.715],
[3.174, 1.509, 1.532],
[3.153, 1.510, 1.506],
[4.788, 2.907, 2.827],
[0.313, 0.048, 0.049],
[15.848, 0.537, 0.528],
[17.859, 0.781, 0.772],
[22.900, 0.894, 0.878],
[21.132, 0.858, 0.785],
[0.287, 0.084, 0.091],
[1.607, 0.153, 0.146],
[0.757, 0.088, 0.088],
[16.230, 0.974, 1.011],
[13.690, 8.122, 8.151],
[0.512, 0.369, 0.364],
[2.788, 0.487, 0.487],
[5.882, 0.592, 0.589],
[3.929, 2.653, 2.674],
[15.934, 3.107, 2.890],
[15.951, 2.902, 2.918],
[1.060, 0.930, 0.917],
[0.261, 0.082, 0.082],
[0.208, 0.035, 0.036],
[0.197, 0.024, 0.023],
[0.385, 0.146, 0.146],
[0.251, 0.017, 0.016],
[0.248, 0.019, 0.014],
[0.244, 0.015, 0.015]
]
}
Loading