-
Notifications
You must be signed in to change notification settings - Fork 593
Expand file tree
/
Copy pathbootstrap.sh
More file actions
executable file
·338 lines (299 loc) · 10.8 KB
/
bootstrap.sh
File metadata and controls
executable file
·338 lines (299 loc) · 10.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
#!/usr/bin/env bash
# Bootstrap entrypoint for yarn-project. Sources the shared CI helper library
# (provides hash_str, cache_*, denoise, parallelize, npm_install_deps,
# default_cmd_handler and sets $cmd from the first argument).
# Quote the command substitution so a repo path containing spaces still works.
source "$(git rev-parse --show-toplevel)/ci3/source_bootstrap"
# Computes the content hash identifying this project's build inputs.
# NOTE: shadows the bash `hash` builtin — intentional within this script.
# Combines the hashes of upstream dependencies (noir, barretenberg) with a
# content hash of sibling projects' .rebuild_patterns files.
# The command substitutions are deliberately unquoted: hash_str must receive
# each sub-hash as a separate argument.
function hash {
  hash_str \
    $(../noir/bootstrap.sh hash) \
    $(../barretenberg/bootstrap.sh hash) \
    $(cache_content_hash ../{avm-transpiler,noir-projects,l1-contracts,yarn-project}/.rebuild_patterns)
}
# Compiles one or more project directories with swc.
# Project paths arrive either on stdin (one per line) or as `::: path...`
# arguments forwarded to GNU parallel via "$@".
function compile_project {
  # TODO: 16 jobs is magic. Was seeing weird errors otherwise.
  local swc_template='cd {} && ../node_modules/.bin/swc src -d dest --config-file=../.swcrc --strip-leading-paths'
  parallel -j16 --line-buffered --tag "$swc_template" "$@"
}
# Returns a list of project paths to compile/lint/publish.
# With 'topological' the list is ordered by workspace dependency graph (via
# yarn); otherwise it is a simple glob of every directory containing src/.
# Ensure exclusions are matching in both cases.
function get_projects {
  if [ "${1:-}" == 'topological' ]; then
    # yarn emits a trailing "Done" status line; strip it.
    # (Removed a redundant `| cat` passthrough stage from this pipeline.)
    yarn workspaces foreach --topological-dev -A \
      --exclude @aztec/aztec3-packages \
      --exclude @aztec/scripts \
      exec 'echo $(pwd)' | grep -v "Done"
  else
    dirname */src | xargs realpath
  fi
}
# Runs prettier over package src trees.
# Usage: format [--check | -w | --write] [package...]
# Default mode is -w (write in place); default scope is every ./*/src dir.
function format {
  local mode="-w"
  local pkgs=()
  # Parse all arguments
  while [ $# -gt 0 ]; do
    case "$1" in
      --check) mode="--check" ;;
      -w|--write) mode="-w" ;;
      -*)
        echo "Unknown flag: $1" >&2
        return 1
        ;;
      *) pkgs+=("$1") ;;
    esac
    shift
  done
  # Build the paths array to search
  local search_paths=()
  if [ ${#pkgs[@]} -eq 0 ]; then
    search_paths=(./*/src)
  else
    local pkg
    for pkg in "${pkgs[@]}"; do
      if [ ! -d "./$pkg/src" ]; then
        echo "Error: Package '$pkg' not found or has no src directory" >&2
        return 1
      fi
      search_paths+=("./$pkg/src")
    done
  fi
  find "${search_paths[@]}" -type f -regex '.*\.\(json\|js\|mjs\|cjs\|ts\)$' | \
    parallel -N30 ./node_modules/.bin/prettier --log-level warn "$mode"
}
# Runs eslint over package src trees.
# Usage: lint [--check | --fix] [package...]
# Default mode is --fix; default scope is every project from get_projects.
function lint {
  local eslint_arg="--fix"
  local pkgs=()
  # Parse all arguments
  while [ $# -gt 0 ]; do
    case "$1" in
      --check) eslint_arg="" ;;
      --fix) eslint_arg="--fix" ;;
      -*)
        echo "Unknown flag: $1" >&2
        return 1
        ;;
      *) pkgs+=("$1") ;;
    esac
    shift
  done
  if [ ${#pkgs[@]} -gt 0 ]; then
    # Validate packages exist
    local pkg
    for pkg in "${pkgs[@]}"; do
      if [ ! -d "./$pkg/src" ]; then
        echo "Error: Package '$pkg' not found or has no src directory" >&2
        return 1
      fi
    done
    # Lint specified packages in parallel (use at most half of CPU cores).
    # $eslint_arg is intentionally unquoted inside the template: when empty
    # (--check) it must expand to nothing, not an empty argument.
    printf '%s\n' "${pkgs[@]}" | parallel -j 50% "cd {} && ../node_modules/.bin/eslint --cache $eslint_arg ./src"
  else
    # Lint all packages in parallel (use at most half of CPU cores)
    get_projects | parallel -j 50% "cd {} && ../node_modules/.bin/eslint --cache $eslint_arg ./src"
  fi
}
# Compiles every project returned by get_projects (paths fed via stdin).
function compile_all_projects {
  compile_project < <(get_projects)
}
# Full compile pipeline: cache check, pre-compile of codegen dependencies,
# code generation, full compile, post-checks, and (on CI) cache upload.
# Runs under strict mode because it is invoked in a subshell via denoise.
function compile_all {
  set -euo pipefail
  local hash=$(hash)
  # Skip entirely if an artifact for this content hash is already cached.
  if cache_download yarn-project-$hash.tar.gz; then
    return
  fi
  # These projects must be compiled first: the generation stages below import them.
  compile_project ::: constants foundation stdlib blob-lib builder ethereum l1-artifacts
  # Call all projects that have a generation stage.
  parallel --joblog joblog.txt --line-buffered --tag 'cd {} && yarn generate' ::: \
    accounts \
    aztec.js \
    cli \
    ethereum \
    slasher \
    stdlib \
    ivc-integration \
    l1-artifacts \
    noir-contracts.js \
    noir-test-contracts.js \
    noir-protocol-circuits-types \
    protocol-contracts \
    pxe
  cat joblog.txt
  get_projects | compile_project
  # Run oracle version check for pxe after compilation
  cd pxe && yarn check_oracle_version
  cd ..
  cmds=('format --check' 'yarn tsgo -b --emitDeclarationOnly')
  if [ "${CI:-0}" -eq 1 ]; then
    cmds+=('lint --check')
  fi
  parallel --joblog joblog.txt --tag denoise ::: "${cmds[@]}"
  cat joblog.txt
  # Default CI to 0: a bare "$CI" aborts under `set -u` when unset
  # (and must match the defaulted check above).
  if [ "${CI:-0}" -eq 1 ]; then
    cache_upload "yarn-project-$hash.tar.gz" $(git ls-files --others --ignored --exclude-standard | grep -v '^node_modules/')
  fi
}
export -f compile_project format lint get_projects compile_all hash
# Default build target: clean ignored artifacts (keeping node_modules/yarn
# cache), install dependencies, then compile everything via compile_all.
function build {
  echo_header "yarn-project build"
  denoise "./bootstrap.sh clean-lite"
  npm_install_deps ../noir
  denoise "compile_all"
}
# Emits one test command per line in the format "<hash>[:OPT=...]* [ENV=...] cmd",
# consumed by filter_test_cmds/parallelize. The prefix options (ISOLATE, CPUS,
# MEM, NET, ...) are interpreted by the CI test runner.
# NOTE(review): the `!(...)` / `**` globs assume extglob and globstar are
# enabled by source_bootstrap — confirm if reusing elsewhere.
function test_cmds {
  local hash=$(hash)
  # Exclusions:
  # end-to-end: e2e tests handled separately with end-to-end/bootstrap.sh.
  # kv-store: Uses mocha so will need different treatment.
  for test in !(end-to-end|kv-store|aztec)/src/**/*.test.ts; do
    # Skip benchmarks here.
    [[ "$test" =~ \.bench\.test\.ts$ ]] && continue
    local prefix=$hash
    local cmd_env=""
    # These need isolation due to network stack usage (p2p, anvil, etc).
    if [[ "$test" =~ ^(prover-node|p2p|ethereum|aztec|prover-client/src/test|stdlib/src/l1-contracts|ivc-integration/src/chonk_browser|blob-client/src/server) ]]; then
      prefix+=":ISOLATE=1:NAME=$test"
    fi
    if [[ "$test" =~ ^ivc-integration/src/chonk_browser ]]; then
      prefix+=":NET=1"
    fi
    # Boost some tests resources.
    if [[ "$test" =~ testbench ]]; then
      prefix+=":CPUS=10:MEM=16g"
    elif [[ "$test" =~ avm_proving_tests || "$test" =~ rollup_ivc_integration || "$test" =~ avm_integration ]]; then
      prefix+=":CPUS=16:MEM=16g"
    elif [[ "$test" =~ ^ivc-integration/ ]]; then
      prefix+=":CPUS=8"
    fi
    # Add debug logging for tests that require a bit more info
    if [[ "$test" == p2p/src/client/p2p_client.test.ts || "$test" == p2p/src/services/discv5/discv5_service.test.ts || "$test" == p2p/src/client/p2p_client.integration.test.ts ]]; then
      cmd_env+=" LOG_LEVEL=debug"
    elif [[ "$test" =~ rollup_ivc_integration || "$test" =~ avm_integration ]]; then
      cmd_env+=" LOG_LEVEL=debug BB_VERBOSE=1 "
    elif [[ "$test" =~ e2e_p2p ]]; then
      cmd_env+=" LOG_LEVEL='verbose; debug:p2p'"
    fi
    # Enable real proofs in prover-client integration tests only on CI full.
    if [[ "$test" =~ ^prover-client/src/test/ ]]; then
      # Default CI_FULL to 0: a bare "$CI_FULL" aborts under `set -u` when unset.
      if [ "${CI_FULL:-0}" -eq 1 ]; then
        prefix+=":CPUS=16:MEM=96g"
        cmd_env+=" LOG_LEVEL=verbose HARDWARE_CONCURRENCY=16"
      else
        cmd_env+=" FAKE_PROOFS=1"
      fi
    fi
    echo "${prefix}${cmd_env} yarn-project/scripts/run_test.sh $test"
  done
  # Uses mocha for browser tests, so we have to treat it differently.
  echo "$hash:ISOLATE=1 cd yarn-project/kv-store && yarn test"
  # Aztec CLI tests
  aztec/bootstrap.sh test_cmds
  if [[ "${TARGET_BRANCH:-}" =~ ^(v[0-9]+(-next)?|backport-to-v[0-9]+-(staging|next))$ ]]; then
    echo "$hash yarn-project/scripts/run_test.sh aztec/src/testnet_compatibility.test.ts"
    echo "$hash yarn-project/scripts/run_test.sh aztec/src/mainnet_compatibility.test.ts"
  fi
}
# Runs the full test suite by piping generated commands through the CI
# filter/parallelize helpers.
# NOTE: shadows the bash `test` builtin — use [ ] / [[ ]] elsewhere in this file.
function test {
  echo_header "yarn-project test"
  test_cmds | filter_test_cmds | parallelize
}
# Emits benchmark commands, one per line, in the same "<hash>[:OPT=...]* ENV cmd"
# format as test_cmds. BENCH_OUTPUT names the per-benchmark results file.
function bench_cmds {
  local hash=$(hash)
  echo "$hash BENCH_OUTPUT=bench-out/sim.bench.json yarn-project/scripts/run_test.sh simulator/src/public/public_tx_simulator/apps_tests/bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/native_world_state.bench.json yarn-project/scripts/run_test.sh world-state/src/native/native_bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/kv_store.bench.json yarn-project/scripts/run_test.sh kv-store/src/bench/map_bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/tx_pool.bench.json yarn-project/scripts/run_test.sh p2p/src/mem_pools/tx_pool/tx_pool_bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/tx_pool_v2.bench.json yarn-project/scripts/run_test.sh p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/tx_validator.bench.json yarn-project/scripts/run_test.sh p2p/src/msg_validators/tx_validator/tx_validator_bench.test.ts"
  echo "$hash:ISOLATE=1:CPUS=16:MEM=32g:TIMEOUT=1200 BENCH_OUTPUT=bench-out/p2p_client_proposal_tx_collector.bench.json yarn-project/scripts/run_test.sh p2p/src/client/test/tx_proposal_collector/p2p_client.proposal_tx_collector.bench.test.ts"
  echo "$hash BENCH_OUTPUT=bench-out/tx.bench.json yarn-project/scripts/run_test.sh stdlib/src/tx/tx_bench.test.ts"
  echo "$hash:ISOLATE=1:CPUS=10:MEM=16g:LOG_LEVEL=silent BENCH_OUTPUT=bench-out/proving_broker.bench.json yarn-project/scripts/run_test.sh prover-client/src/test/proving_broker_testbench.test.ts"
  echo "$hash:ISOLATE=1:CPUS=16:MEM=16g BENCH_OUTPUT=bench-out/avm_bulk_test.bench.json yarn-project/scripts/run_test.sh bb-prover/src/avm_proving_tests/avm_bulk.test.ts"
}
# Publishes all workspace packages to npm (in topological order), then smoke
# tests installing every published package into a throwaway npm project.
# Arguments: $1 - npm dist tag (e.g. latest/next); $2 - version being published.
function release_packages {
  echo "Computing packages to publish..."
  local packages=$(get_projects topological)
  # Strip platform-specific solc binary from l1-artifacts before npm publish.
  # Replace solc="./solc-X.Y.Z" with solc_version="X.Y.Z" so forge auto-downloads
  # the correct binary via SVM on the end-user's machine.
  local l1_artifacts="l1-artifacts/l1-contracts"
  rm -f "$l1_artifacts"/solc-*
  sed -i 's|^solc = "\./solc-\(.*\)"|solc_version = "\1"|' "$l1_artifacts/foundry.toml"
  local package_list=()
  local package
  for package in $packages; do
    # Quote the path; deploy from a subshell so cwd is untouched.
    (cd "$package" && retry "deploy_npm $1 $2")
    local package_name=$(jq -r .name "$package/package.json")
    package_list+=("$package_name@$2")
  done
  # Smoke test the deployed packages. Run in a subshell so the caller's cwd
  # isn't left pointing at the temp dir we delete afterwards.
  local dir=$(mktemp -d)
  (
    cd "$dir" || exit 1
    do_or_dryrun npm init -y
    # NOTE: originally this was on one line, but sometimes snagged downloading end-to-end (most recently published package).
    for package in "${package_list[@]}"; do
      retry "do_or_dryrun npm install $package"
    done
  )
  rm -rf "$dir"
}
# Release entrypoint: publishes under the computed dist tag, using the git ref
# name (with its leading "v" stripped) as the version.
function release {
  echo_header "yarn-project release"
  release_packages "$(dist_tag)" "${REF_NAME#v}"
}
# Top-level command dispatch. $cmd is set by source_bootstrap from argv;
# unmatched commands fall through to the shared default handler
# (which resolves names like build/test/release to the functions above).
case "$cmd" in
  "clean")
    # Optionally clean just one subdirectory; quote in case of odd paths.
    if [ -n "${1:-}" ]; then
      cd "$1"
    fi
    git clean -fdx
    ;;
  "clean-lite")
    # Remove ignored build outputs but keep node_modules and the yarn cache.
    files=$(git ls-files --ignored --others --exclude-standard | grep -vE '(node_modules/|^\.yarn/)' || true)
    if [ -n "$files" ]; then
      echo "$files" | xargs rm -rf
    fi
    ;;
  "")
    build
    ;;
  "compile")
    if [ -n "${1:-}" ]; then
      compile_project ::: "$@"
    else
      get_projects | compile_project
    fi
    ;;
  instrumented_profile)
    # Automatically hooks sites with benchmarking instrumentation.
    # Requires exactly one argument (the command to profile); the old `-gt 1`
    # check let zero args through and then crashed on cmd=$1 under set -u.
    if [ "$#" -ne 1 ]; then
      echo "Usage: ./bootstrap.sh instrumented_profile <command>"
      exit 1
    fi
    cmd=$1
    # Refuse to continue if there are uncommitted changes to tracked files.
    if [ -n "$(git status --porcelain | grep -v '^??')" ]; then
      echo "Please commit or stash your changes before running this command."
      exit 1
    fi
    rm -f profile-*.json
    echo "NOTE: If you interrupt this you may have a dirty git state or build state. Otherwise it will clean up."
    ( cd ./scripts/instrumenting-profiler && npm install )
    ./scripts/instrumenting-profiler/instrument.sh
    denoise "./bootstrap.sh compile"
    pwd=$(pwd)
    # Restore the instrumented sources and rebuild on any exit path.
    cleanup_instrumentation() {
      # we may have changed paths
      git checkout "$pwd"
      denoise "cd '$pwd' && ./bootstrap.sh compile"
      for f in profile-*.json; do
        echo "To print: ./scripts/instrumenting-profiler/print.mjs $(pwd)/$f"
      done
    }
    trap cleanup_instrumentation EXIT
    # NOTE(review): eval of a user-supplied command — acceptable for a dev
    # tool, but never feed untrusted input through this path.
    eval "$cmd"
    ;;
  *)
    default_cmd_handler "$@"
    ;;
esac