Skip to content

Commit dae8837

Browse files
mgrange1998 and meta-codesync[bot]
authored and committed
Rename parallelism to concurrency in Client and AxClient APIs (#4923)
Summary: Pull Request resolved: #4923 Renames the `parallelism` parameter to `concurrency` in `Client.run_trials()` and adds backward-compatible deprecated `max_parallelism` parameters in `AxClient.create_experiment()` and `AxClient.get_max_parallelism()` → `get_max_concurrency()`. Both include deprecation warnings guiding callers to use the new parameter names, with validation that old and new parameters are not specified simultaneously. Differential Revision: D93771849
1 parent ed6f4dc commit dae8837

2 files changed

Lines changed: 22 additions & 22 deletions

File tree

ax/service/ax_client.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -837,38 +837,38 @@ def get_trials_data_frame(self) -> pd.DataFrame:
837837
return self.experiment.to_df()
838838

839839
def get_max_parallelism(self) -> list[tuple[int, int]]:
840-
"""Retrieves maximum number of trials that can be scheduled in parallel
840+
"""Retrieves maximum number of trials that can be scheduled concurrently
841841
at different stages of optimization.
842842
843843
Some optimization algorithms profit significantly from sequential
844844
optimization (i.e. suggest a few points, get updated with data for them,
845845
repeat, see https://ax.dev/docs/bayesopt.html).
846-
Parallelism setting indicates how many trials should be running simulteneously
846+
Concurrency setting indicates how many trials should be running simultaneously
847847
(generated, but not yet completed with data).
848848
849849
The output of this method is mapping of form
850-
{num_trials -> max_parallelism_setting}, where the max_parallelism_setting
851-
is used for num_trials trials. If max_parallelism_setting is -1, as
852-
many of the trials can be ran in parallel, as necessary. If num_trials
853-
in a tuple is -1, then the corresponding max_parallelism_setting
850+
{num_trials -> max_concurrency_setting}, where the max_concurrency_setting
851+
is used for num_trials trials. If max_concurrency_setting is -1, as
852+
many of the trials can be ran concurrently, as necessary. If num_trials
853+
in a tuple is -1, then the corresponding max_concurrency_setting
854854
should be used for all subsequent trials.
855855
856856
For example, if the returned list is [(5, -1), (12, 6), (-1, 3)],
857-
the schedule could be: run 5 trials with any parallelism, run 6 trials in
858-
parallel twice, run 3 trials in parallel for as long as needed. Here,
857+
the schedule could be: run 5 trials with any concurrency, run 6 trials
858+
concurrently twice, run 3 trials concurrently for as long as needed. Here,
859859
'running' a trial means obtaining a next trial from `AxClient` through
860860
get_next_trials and completing it with data when available.
861861
862862
Returns:
863-
Mapping of form {num_trials -> max_parallelism_setting}.
863+
Mapping of form {num_trials -> max_concurrency_setting}.
864864
"""
865-
parallelism_settings = []
865+
concurrency_settings = []
866866
for node in self.generation_strategy._nodes:
867-
# Extract max_parallelism from MaxGenerationParallelism criterion
868-
max_parallelism = None
867+
# Extract max_concurrency from MaxGenerationParallelism criterion
868+
max_concurrency = None
869869
for tc in node.transition_criteria:
870870
if isinstance(tc, MaxGenerationParallelism):
871-
max_parallelism = tc.threshold
871+
max_concurrency = tc.threshold
872872
break
873873
# Try to get num_trials from the node. If there's no MinTrials
874874
# criterion (unlimited trials), num_trials will raise UserInputError.
@@ -877,13 +877,13 @@ def get_max_parallelism(self) -> list[tuple[int, int]]:
877877
num_trials = node.num_trials
878878
except UserInputError:
879879
num_trials = -1
880-
parallelism_settings.append(
880+
concurrency_settings.append(
881881
(
882882
num_trials,
883-
max_parallelism if max_parallelism is not None else num_trials,
883+
max_concurrency if max_concurrency is not None else num_trials,
884884
)
885885
)
886-
return parallelism_settings
886+
return concurrency_settings
887887

888888
def get_optimization_trace(
889889
self, objective_optimum: float | None = None

ax/service/tests/test_ax_client.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@
5050
UserInputError,
5151
)
5252
from ax.exceptions.generation_strategy import MaxParallelismReachedException
53-
from ax.generation_strategy.dispatch_utils import DEFAULT_BAYESIAN_PARALLELISM
53+
from ax.generation_strategy.dispatch_utils import DEFAULT_BAYESIAN_CONCURRENCY
5454
from ax.generation_strategy.generation_strategy import (
5555
GenerationNode,
5656
GenerationStep,
@@ -511,7 +511,7 @@ def test_default_generation_strategy_continuous(self) -> None:
511511
if i < 5:
512512
self.assertEqual(gen_limit, 5 - i)
513513
else:
514-
self.assertEqual(gen_limit, DEFAULT_BAYESIAN_PARALLELISM)
514+
self.assertEqual(gen_limit, DEFAULT_BAYESIAN_CONCURRENCY)
515515
parameterization, trial_index = ax_client.get_next_trial()
516516
x, y = parameterization.get("x"), parameterization.get("y")
517517
ax_client.complete_trial(
@@ -1616,14 +1616,14 @@ def test_keep_generating_without_data(self) -> None:
16161616
self.assertTrue(len(node0_min_trials) > 0)
16171617
self.assertFalse(node0_min_trials[0].block_gen_if_met)
16181618

1619-
# Check that max_parallelism is None by verifying no MaxGenerationParallelism
1619+
# Check that max_concurrency is None by verifying no MaxGenerationParallelism
16201620
# criterion exists on node 1
1621-
node1_max_parallelism = [
1621+
node1_max_concurrency = [
16221622
tc
16231623
for tc in ax_client.generation_strategy._nodes[1].transition_criteria
16241624
if isinstance(tc, MaxGenerationParallelism)
16251625
]
1626-
self.assertEqual(len(node1_max_parallelism), 0)
1626+
self.assertEqual(len(node1_max_concurrency), 0)
16271627

16281628
for _ in range(10):
16291629
ax_client.get_next_trial()
@@ -2872,7 +2872,7 @@ def test_estimate_early_stopping_savings(self) -> None:
28722872

28732873
self.assertEqual(ax_client.estimate_early_stopping_savings(), 0)
28742874

2875-
def test_max_parallelism_exception_when_early_stopping(self) -> None:
2875+
def test_max_concurrency_exception_when_early_stopping(self) -> None:
28762876
ax_client = AxClient()
28772877
ax_client.create_experiment(
28782878
parameters=[

0 commit comments

Comments (0)