# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
from __future__ import absolute_import
import logging
from enum import Enum
from typing import Dict, List, Optional, Union
from sagemaker.core.analytics import HyperparameterTuningJobAnalytics
from sagemaker.core.jumpstart.utils import (
add_jumpstart_uri_tags,
get_jumpstart_base_name_if_jumpstart_model,
)
from sagemaker.core.parameter import (
CategoricalParameter,
ContinuousParameter,
IntegerParameter,
ParameterRange,
)
from sagemaker.core.shapes import (
HyperParameterTuningJobWarmStartConfig,
HyperParameterTuningJobStrategyConfig,
HyperParameterTuningInstanceConfig,
TuningJobCompletionCriteria,
Channel,
)
from sagemaker.core.resources import HyperParameterTuningJob
from sagemaker.core.common_utils import (
Tags,
base_from_name,
base_name_from_image,
format_tags,
name_from_base,
to_string,
)
from sagemaker.core.helper.pipeline_variable import PipelineVariable
from sagemaker.core.workflow.pipeline_context import PipelineSession, runnable_by_pipeline
# Lazy import to avoid circular dependency - ModelTrainer imports from core
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from sagemaker.train.model_trainer import ModelTrainer
from sagemaker.core.training.configs import InputData
from sagemaker.core.training.utils import _is_valid_s3_uri
HYPERPARAMETER_TUNING_JOB_NAME = "HyperParameterTuningJobName"
PARENT_HYPERPARAMETER_TUNING_JOBS = "ParentHyperParameterTuningJobs"
WARM_START_TYPE = "WarmStartType"
HYPERBAND_STRATEGY_CONFIG = "HyperbandStrategyConfig"
HYPERBAND_MIN_RESOURCE = "MinResource"
HYPERBAND_MAX_RESOURCE = "MaxResource"
GRID_SEARCH = "Grid"
MAX_NUMBER_OF_TRAINING_JOBS_NOT_IMPROVING = "MaxNumberOfTrainingJobsNotImproving"
BEST_OBJECTIVE_NOT_IMPROVING = "BestObjectiveNotImproving"
CONVERGENCE_DETECTED = "ConvergenceDetected"
COMPLETE_ON_CONVERGENCE_DETECTED = "CompleteOnConvergence"
TARGET_OBJECTIVE_METRIC_VALUE = "TargetObjectiveMetricValue"
MAX_RUNTIME_IN_SECONDS = "MaxRuntimeInSeconds"
logger = logging.getLogger(__name__)
class WarmStartTypes(Enum):
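"""Warm start configuration types for a hyperparameter tuning job.
``IDENTICAL_DATA_AND_ALGORITHM`` reuses results from parent tuning jobs that use the
same training data and algorithm; ``TRANSFER_LEARNING`` also allows the training data
and algorithm image to differ from the parent jobs.
"""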
IDENTICAL_DATA_AND_ALGORITHM = "IdenticalDataAndAlgorithm"
TRANSFER_LEARNING = "TransferLearning"
class HyperparameterTuner(object):
"""Defines interaction with Amazon SageMaker hyperparameter tuning jobs.
It also supports deploying the resulting models.
"""
TUNING_JOB_NAME_MAX_LENGTH = 32
def __init__(
self,
model_trainer: "ModelTrainer",
objective_metric_name: Union[str, PipelineVariable],
hyperparameter_ranges: Dict[str, ParameterRange],
metric_definitions: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None,
strategy: Union[str, PipelineVariable] = "Bayesian",
objective_type: Union[str, PipelineVariable] = "Maximize",
max_jobs: Union[int, PipelineVariable] = None,
max_parallel_jobs: Union[int, PipelineVariable] = 1,
max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None,
tags: Optional[Tags] = None,
base_tuning_job_name: Optional[str] = None,
warm_start_config: Optional[HyperParameterTuningJobWarmStartConfig] = None,
strategy_config: Optional[HyperParameterTuningJobStrategyConfig] = None,
completion_criteria_config: Optional[TuningJobCompletionCriteria] = None,
early_stopping_type: Union[str, PipelineVariable] = "Off",
model_trainer_name: Optional[str] = None,
random_seed: Optional[int] = None,
autotune: bool = False,
hyperparameters_to_keep_static: Optional[List[str]] = None,
):
"""Creates a ``HyperparameterTuner`` instance.
It takes a model_trainer to obtain configuration information for training
jobs that are created as the result of a hyperparameter tuning job.
Args:
model_trainer (sagemaker.train.model_trainer.ModelTrainer): A model_trainer object
that has been initialized with the desired configuration. There
does not need to be a training job associated with this
instance.
objective_metric_name (str or PipelineVariable): Name of the metric for evaluating
training jobs.
hyperparameter_ranges (dict[str, sagemaker.parameter.ParameterRange]): Dictionary of
parameter ranges. These parameter ranges can be one
of three types: Continuous, Integer, or Categorical. The keys of
the dictionary are the names of the hyperparameter, and the
values are the appropriate parameter range class to represent
the range.
metric_definitions (list[dict[str, str]] or list[dict[str, PipelineVariable]]): A list of
dictionaries that defines the metric(s) used to evaluate the training jobs (default:
None). Each dictionary contains two keys: 'Name' for the name of
the metric, and 'Regex' for the regular expression used to
extract the metric from the logs. This should be defined only
for hyperparameter tuning jobs that don't use an Amazon
algorithm.
strategy (str or PipelineVariable): Strategy to be used for hyperparameter optimization.
More information about different strategies:
https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-how-it-works.html.
Available options are: 'Bayesian', 'Random', 'Hyperband',
'Grid' (default: 'Bayesian')
objective_type (str or PipelineVariable): The type of the objective metric for
evaluating training jobs. This value can be either 'Minimize' or
'Maximize' (default: 'Maximize').
max_jobs (int or PipelineVariable): Maximum total number of training jobs to start for
the hyperparameter tuning job. The default value is unspecified for the 'Grid'
strategy, and is 1 for all other strategies (default: None).
max_parallel_jobs (int or PipelineVariable): Maximum number of parallel training jobs to
start (default: 1).
max_runtime_in_seconds (int or PipelineVariable): The maximum time in seconds
that a hyperparameter tuning job can run.
tags (Optional[Tags]): Tags for labeling the tuning job (default: None).
For more, see https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html.
base_tuning_job_name (str): Prefix for the hyperparameter tuning job
name when the :meth:`~HyperparameterTuner.tune` method launches.
If not specified, a default job name is generated, based on the
training image name and current timestamp.
warm_start_config (sagemaker.core.shapes.HyperParameterTuningJobWarmStartConfig): A
``HyperParameterTuningJobWarmStartConfig`` object that has been initialized with the
configuration defining the nature of the warm start tuning job.
strategy_config (sagemaker.core.shapes.HyperParameterTuningJobStrategyConfig): A
configuration for the hyperparameter tuning job optimization strategy.
completion_criteria_config (sagemaker.core.shapes.TuningJobCompletionCriteria): A
configuration for the completion criteria.
early_stopping_type (str or PipelineVariable): Specifies whether early stopping is
enabled for the job. Can be either 'Auto' or 'Off' (default:
'Off'). If set to 'Off', early stopping will not be attempted.
If set to 'Auto', early stopping of some training jobs may
happen, but is not guaranteed to.
model_trainer_name (str): A unique name to identify a model_trainer within the
hyperparameter tuning job, when more than one model_trainer is used with
the same tuning job (default: None).
random_seed (int): An initial value used to initialize a pseudo-random number generator.
Setting a random seed makes the hyperparameter tuning search strategies
produce more consistent configurations for the same tuning job.
autotune (bool): Whether the parameter ranges or other unset settings of a tuning job
should be chosen automatically (default: False).
hyperparameters_to_keep_static (list[str]): Names of hyperparameters that will be kept
static and will not be assigned a tunable range when Autotune is enabled
(default: None).
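Example (illustrative sketch; ``my_model_trainer`` is an assumed, pre-configured
``ModelTrainer`` and the metric name and regex are placeholders):
>>> from sagemaker.core.parameter import ContinuousParameter, IntegerParameter
>>> tuner = HyperparameterTuner(
...     model_trainer=my_model_trainer,
...     objective_metric_name="validation:accuracy",
...     hyperparameter_ranges={
...         "learning_rate": ContinuousParameter(1e-5, 1e-2),
...         "epochs": IntegerParameter(1, 10),
...     },
...     metric_definitions=[{"Name": "validation:accuracy", "Regex": "accuracy=([0-9.]+)"}],
...     max_jobs=4,
...     max_parallel_jobs=2,
... )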
"""
if hyperparameter_ranges is None or len(hyperparameter_ranges) == 0:
if not autotune:
raise ValueError("Need to specify hyperparameter ranges or set autotune=True.")
if not autotune and hyperparameters_to_keep_static is not None:
raise ValueError(
"hyperparameters_to_keep_static parameter is set, however Autotune mode is not "
"enabled. Either do not set value for hyperparameters_to_keep_static parameter, "
"or enable Autotune mode by setting autotune=True."
)
if hyperparameters_to_keep_static is not None:
if len(hyperparameters_to_keep_static) != len(set(hyperparameters_to_keep_static)):
raise ValueError("Please remove duplicate names in hyperparameters_to_keep_static.")
if model_trainer_name is not None:
self.model_trainer = None
self.objective_metric_name = None
self._hyperparameter_ranges = None
self.metric_definitions = None
self.model_trainer_dict = {model_trainer_name: model_trainer}
self.objective_metric_name_dict = {model_trainer_name: objective_metric_name}
self._hyperparameter_ranges_dict = {model_trainer_name: hyperparameter_ranges}
self.metric_definitions_dict = (
{model_trainer_name: metric_definitions} if metric_definitions is not None else {}
)
self.static_hyperparameters = None
self.auto_parameters = None
self.auto_parameters_dict = None
self.hyperparameters_to_keep_static = None
self.hyperparameters_to_keep_static_dict = {
model_trainer_name: hyperparameters_to_keep_static
}
else:
self.model_trainer = model_trainer
self.objective_metric_name = objective_metric_name
self._hyperparameter_ranges = hyperparameter_ranges
self.metric_definitions = metric_definitions
self.model_trainer_dict = None
self.objective_metric_name_dict = None
self._hyperparameter_ranges_dict = None
self.metric_definitions_dict = None
self.static_hyperparameters = None
self.static_hyperparameters_dict = None
self.auto_parameters = None
self.auto_parameters_dict = None
self.hyperparameters_to_keep_static = hyperparameters_to_keep_static
self.hyperparameters_to_keep_static_dict = None
self._validate_parameter_ranges(model_trainer, hyperparameter_ranges)
self.strategy = strategy
self.strategy_config = strategy_config
self.completion_criteria_config = completion_criteria_config
self.objective_type = objective_type
# For the Grid strategy we expect max_jobs to be None and recalculate it later.
# For all other strategies we keep the default value of 1 (the previous default)
# for backward compatibility.
self.max_jobs = max_jobs
if max_jobs is None and strategy != GRID_SEARCH:
self.max_jobs = 1
self.max_parallel_jobs = max_parallel_jobs
self.max_runtime_in_seconds = max_runtime_in_seconds
self.tags = format_tags(tags)
self.base_tuning_job_name = base_tuning_job_name
self._current_job_name = None
self.latest_tuning_job = None
self.warm_start_config = warm_start_config
self.early_stopping_type = early_stopping_type
self.random_seed = random_seed
self.instance_configs_dict = None
self.instance_configs = None
self.autotune = autotune
def override_resource_config(
self,
instance_configs: Union[
List[HyperParameterTuningInstanceConfig],
Dict[str, List[HyperParameterTuningInstanceConfig]],
],
):
"""Override the instance configuration of the model_trainers used by the tuner.
Args:
instance_configs (List[HyperParameterTuningInstanceConfig] or Dict[str, List[HyperParameterTuningInstanceConfig]]):
The InstanceConfigs to use as an override for the instance configuration
of the model_trainer. ``None`` will remove the override.
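Example (illustrative sketch; the snake_case field names of
``HyperParameterTuningInstanceConfig`` are assumptions):
>>> tuner.override_resource_config(
...     [
...         HyperParameterTuningInstanceConfig(
...             instance_type="ml.m5.xlarge", instance_count=1, volume_size_in_gb=30
...         )
...     ]
... )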
"""
if isinstance(instance_configs, dict):
self._validate_dict_argument(
name="instance_configs",
value=instance_configs,
allowed_keys=list(self.model_trainer_dict.keys()),
)
self.instance_configs_dict = instance_configs
else:
self.instance_configs = instance_configs
if self.model_trainer_dict is not None and self.model_trainer_dict.keys():
model_trainer_names = list(self.model_trainer_dict.keys())
self.instance_configs_dict = {model_trainer_names[0]: instance_configs}
def _prepare_for_tuning(self, job_name=None):
"""Prepare the tuner instance for tuning (train)."""
self._prepare_job_name_for_tuning(job_name=job_name)
self._prepare_static_hyperparameters_for_tuning()
self._prepare_auto_parameters_for_tuning()
self._prepare_tags_for_tuning()
def _get_model_uri(
self,
model_trainer,
):
"""Return the model artifact URI used by the ModelTrainer instance.
This attribute can live in multiple places, and accessing the attribute can
raise a TypeError, which needs to be handled.
"""
try:
return getattr(model_trainer, "model_data", None)
except TypeError:
return getattr(model_trainer, "model_uri", None)
def _prepare_tags_for_tuning(self):
"""Add tags to tuning job (from ModelTrainer and JumpStart tags)."""
# Add tags from ModelTrainer class
model_trainer = (
self.model_trainer or self.model_trainer_dict[sorted(self.model_trainer_dict.keys())[0]]
)
model_trainer_tags = getattr(model_trainer, "tags", []) or []
if self.tags is None and len(model_trainer_tags) > 0:
self.tags = []
for tag in model_trainer_tags:
if tag not in self.tags:
self.tags.append(tag)
if self.sagemaker_session.settings.include_jumpstart_tags:
self.tags = add_jumpstart_uri_tags(
tags=self.tags,
training_script_uri=getattr(model_trainer, "source_code", None),
training_model_uri=self._get_model_uri(model_trainer),
)
def _prepare_job_name_for_tuning(self, job_name=None):
"""Set current job name before starting tuning."""
if job_name is not None:
self._current_job_name = job_name
else:
base_name = self.base_tuning_job_name
if base_name is None:
model_trainer = (
self.model_trainer
or self.model_trainer_dict[sorted(self.model_trainer_dict.keys())[0]]
)
base_name = base_name_from_image(
model_trainer.training_image,
default_base_name="ModelTrainer",
)
jumpstart_base_name = get_jumpstart_base_name_if_jumpstart_model(
getattr(model_trainer, "source_code", None),
self._get_model_uri(model_trainer),
)
base_name = jumpstart_base_name or base_name
self._current_job_name = name_from_base(
base_name, max_length=self.TUNING_JOB_NAME_MAX_LENGTH, short=True
)
def _prepare_static_hyperparameters_for_tuning(self):
"""Prepare static hyperparameters for all model_trainers before tuning."""
self.static_hyperparameters = None
if self.model_trainer is not None:
self.static_hyperparameters = self._prepare_static_hyperparameters(
self.model_trainer, self._hyperparameter_ranges
)
self.static_hyperparameters_dict = None
if self.model_trainer_dict is not None:
self.static_hyperparameters_dict = {
model_trainer_name: self._prepare_static_hyperparameters(
model_trainer,
self._hyperparameter_ranges_dict[model_trainer_name],
)
for (model_trainer_name, model_trainer) in self.model_trainer_dict.items()
}
def _prepare_auto_parameters_for_tuning(self):
"""Prepare auto parameters for all model_trainers before tuning."""
self.auto_parameters = None
if self.model_trainer is not None:
self.static_hyperparameters, self.auto_parameters = self._prepare_auto_parameters(
self.static_hyperparameters, self.hyperparameters_to_keep_static
)
self.auto_parameters_dict = None
if self.model_trainer_dict is not None:
static_auto_parameters_dict = {
model_trainer_name: self._prepare_auto_parameters(
self.static_hyperparameters_dict[model_trainer_name],
(
self.hyperparameters_to_keep_static_dict.get(model_trainer_name, None)
if self.hyperparameters_to_keep_static_dict
else None
),
)
for model_trainer_name in sorted(self.model_trainer_dict.keys())
}
self.static_hyperparameters_dict = {}
self.auto_parameters_dict = {}
for model_trainer_name, (
static_hyperparameters,
auto_parameters,
) in static_auto_parameters_dict.items():
self.static_hyperparameters_dict[model_trainer_name] = static_hyperparameters
self.auto_parameters_dict[model_trainer_name] = auto_parameters
@classmethod
def _prepare_static_hyperparameters(cls, model_trainer, hyperparameter_ranges):
"""Prepare static hyperparameters for one model_trainer before tuning."""
# Initialize hyperparameters if None
if model_trainer.hyperparameters is None:
model_trainer.hyperparameters = {}
# Remove any hyperparameter that will be tuned
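# e.g. hyperparameters={"epochs": 10, "learning_rate": 0.1} with a range on
# "learning_rate" leaves static_hyperparameters={"epochs": "10"}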
static_hyperparameters = {
str(k): to_string(v) for (k, v) in model_trainer.hyperparameters.items()
}
if hyperparameter_ranges is not None:
for hyperparameter_name in hyperparameter_ranges.keys():
static_hyperparameters.pop(hyperparameter_name, None)
return static_hyperparameters
def _prepare_auto_parameters(self, static_hyperparameters, hyperparameters_to_keep_static):
"""Prepare auto parameters for one model_trainer before tuning."""
if not self.autotune:
return static_hyperparameters, None
if hyperparameters_to_keep_static is None:
hyperparameters_to_keep_static = {}
if not set(hyperparameters_to_keep_static).issubset(set(static_hyperparameters.keys())):
raise ValueError(
"Names in hyperparameters_to_keep_static must be members of model_trainer's "
"hyperparameters."
)
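# Split into the hyperparameters that stay static and those handed to Autotune,
# e.g. static={"epochs": "10", "batch_size": "32"} with keep_static=["epochs"]
# -> new static={"epochs": "10"}, auto_parameters={"batch_size": "32"}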
new_static_hyperparameters = {
k: v for k, v in static_hyperparameters.items() if k in hyperparameters_to_keep_static
}
auto_parameters = {
k: v
for k, v in static_hyperparameters.items()
if k not in hyperparameters_to_keep_static
}
return new_static_hyperparameters, auto_parameters
@classmethod
def _prepare_model_trainer_for_tuning(cls, model_trainer, inputs=None, job_name=None, **kwargs):
"""Prepare ModelTrainer before tuning by building sm_drivers and code channels.
This method replicates the channel-building logic from ModelTrainer._create_training_job()
to ensure the sm_drivers channel (containing torchrun_driver.py, distributed config, and
sm_train.sh) is included in the tuning job definition. Without this, the framework
container falls back to the legacy entry point (python train.py) instead of using the
V3 driver (torchrun), breaking distributed training.
Args:
model_trainer: ModelTrainer instance to prepare
inputs: Training inputs (unused, for V2 compatibility)
job_name: Job name (unused, for V2 compatibility)
**kwargs: Additional arguments (unused, for V2 compatibility)
"""
source_code = getattr(model_trainer, "source_code", None)
if source_code is None:
return
# Only proceed if source_code has a real entry_script string
entry_script = getattr(source_code, "entry_script", None)
if not isinstance(entry_script, str):
return
cls._build_driver_and_code_channels(model_trainer)
@classmethod
def _build_driver_and_code_channels(cls, model_trainer):
"""Build sm_drivers and code input channels for the tuning job.
Replicates the channel-building logic from ModelTrainer._create_training_job()
so that the tuning job gets the same execution environment as a standalone
training job (distributed drivers, source code, train script).
Args:
model_trainer: ModelTrainer instance with source_code configured
"""
import json
import os
import shutil
import time
from tempfile import TemporaryDirectory
from sagemaker.train.constants import (
SM_CODE,
SM_DRIVERS,
SM_DRIVERS_LOCAL_PATH,
DEFAULT_CONTAINER_ENTRYPOINT,
DEFAULT_CONTAINER_ARGUMENTS,
)
source_code = model_trainer.source_code
base_name = model_trainer.base_job_name or "tuning"
key_prefix = f"{base_name}/tuning-{int(time.time())}/input"
# Build sm_drivers channel (same as ModelTrainer._create_training_job)
temp_dir = TemporaryDirectory()
shutil.copytree(SM_DRIVERS_LOCAL_PATH, temp_dir.name, dirs_exist_ok=True)
# If distributed config is set, copy distributed drivers
if model_trainer.distributed:
driver_dir = os.path.join(temp_dir.name, "distributed_drivers")
shutil.copytree(model_trainer.distributed.driver_dir, driver_dir, dirs_exist_ok=True)
# Write sourcecode.json
source_code_json_path = os.path.join(temp_dir.name, "sourcecode.json")
with open(source_code_json_path, "w") as f:
dump = source_code.model_dump() if source_code else {}
f.write(json.dumps(dump))
# Write distributed.json
distributed_json_path = os.path.join(temp_dir.name, "distributed.json")
with open(distributed_json_path, "w") as f:
dump = model_trainer.distributed.model_dump() if model_trainer.distributed else {}
f.write(json.dumps(dump))
# Prepare the train script (sm_train.sh)
model_trainer._prepare_train_script(
tmp_dir=temp_dir,
source_code=source_code,
distributed=model_trainer.distributed,
)
# Upload sm_drivers channel
sm_drivers_channel = model_trainer.create_input_data_channel(
channel_name=SM_DRIVERS,
data_source=temp_dir.name,
key_prefix=key_prefix,
ignore_patterns=source_code.ignore_patterns,
)
# Store channels on model_trainer so _build_training_job_definition can pick them up
model_trainer._tuner_channels = [sm_drivers_channel]
# Set script mode hyperparameters required by framework containers.
# The framework container (PyTorch, TF) uses sagemaker_program to find the entry script
# and sagemaker_submit_directory to download source code to /opt/ml/code/.
if model_trainer.hyperparameters is None:
model_trainer.hyperparameters = {}
model_trainer.hyperparameters["sagemaker_program"] = source_code.entry_script
# Upload sourcedir.tar.gz for the legacy framework container path.
# The HPT API doesn't support container_entrypoint, so the framework container
# uses sagemaker_submit_directory to download and extract code to /opt/ml/code/.
if source_code.source_dir and not _is_valid_s3_uri(source_code.source_dir):
import tarfile
import tempfile
session = model_trainer.sagemaker_session
bucket = session.default_bucket()
s3_key = f"{key_prefix}/sourcedir/sourcedir.tar.gz"
with tempfile.NamedTemporaryFile(suffix=".tar.gz", delete=False) as tmp:
tar_path = tmp.name
try:
with tarfile.open(tar_path, "w:gz") as tar:
for root, _dirs, files in os.walk(source_code.source_dir):
for f in files:
fpath = os.path.join(root, f)
arcname = os.path.relpath(fpath, source_code.source_dir)
tar.add(fpath, arcname=arcname)
s3_client = session.boto_session.client(
"s3", region_name=session.boto_region_name
)
s3_client.upload_file(tar_path, bucket, s3_key)
model_trainer.hyperparameters["sagemaker_submit_directory"] = (
f"s3://{bucket}/{s3_key}"
)
finally:
if os.path.exists(tar_path):
os.remove(tar_path)
elif source_code.source_dir and _is_valid_s3_uri(source_code.source_dir):
model_trainer.hyperparameters["sagemaker_submit_directory"] = source_code.source_dir
# Store the temp dir reference to prevent cleanup
model_trainer._tuner_temp_dir = temp_dir
@runnable_by_pipeline
def tune(
self,
inputs: Optional[
Union[
str,
Dict[str, str],
List[Union[Channel, InputData]],
]
] = None,
job_name: Optional[str] = None,
model_trainer_kwargs: Optional[Dict[str, dict]] = None,
wait: bool = True,
**kwargs,
):
"""Start a hyperparameter tuning job.
Args:
inputs: Information about the training data. Please refer to the
``train()`` method of the associated model_trainer, as this can take
any of the following forms:
* (str) - The S3 location where training data is saved.
* (dict[str, str]) - If using multiple channels for training data, you can specify
a dict mapping channel names to S3 URI strings.
* (list[sagemaker.core.shapes.Channel]) - A list of Channel objects for
detailed input data configuration.
* (list[sagemaker.core.training.configs.InputData]) - A list of InputData objects for
simplified input data specification.
job_name (str): Tuning job name. If not specified, the tuner
generates a default job name, based on the training image name
and current timestamp.
model_trainer_kwargs (dict[str, dict]): Dictionary for other arguments needed for
training. Should be used only for tuners created via the factory method create().
The keys are the model_trainer names for the model_trainer_dict argument of create()
method. Each value is a dictionary for the other arguments needed for training
of the corresponding model_trainer.
wait (bool): Whether the call should wait until the job completes (default: ``True``).
**kwargs: Other arguments needed for training. Please refer to the
``train()`` method of the associated model_trainer to see what other
arguments are needed.
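Example (illustrative sketch; the S3 URIs are placeholders):
>>> tuner.tune(
...     inputs={"train": "s3://my-bucket/train", "validation": "s3://my-bucket/validation"},
...     wait=False,
... )
>>> tuner.wait()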
"""
if self.model_trainer is not None:
self._train_with_model_trainer(inputs, job_name, **kwargs)
else:
self._train_with_model_trainer_dict(inputs, job_name, model_trainer_kwargs)
if wait:
self.latest_tuning_job.wait()
def _train_with_model_trainer(self, inputs, job_name, **kwargs):
"""Start tuning for tuner instances that have the ``model_trainer`` field set."""
# Prepare model_trainer before tuning (upload source code, set hyperparameters)
self._prepare_model_trainer_for_tuning(self.model_trainer, inputs, job_name, **kwargs)
self._prepare_for_tuning(job_name=job_name)
self.latest_tuning_job = self._start_tuning_job(inputs)
def _train_with_model_trainer_dict(self, inputs, job_name, model_trainer_kwargs):
"""Start tuning for tuner instances that have the ``model_trainer_dict`` field set."""
model_trainer_names = sorted(self.model_trainer_dict.keys())
self._validate_dict_argument(name="inputs", value=inputs, allowed_keys=model_trainer_names)
self._validate_dict_argument(
name="model_trainer_kwargs",
value=model_trainer_kwargs,
allowed_keys=model_trainer_names,
)
# Prepare each model_trainer before tuning (upload source code, set hyperparameters)
for model_trainer_name, model_trainer in self.model_trainer_dict.items():
ins = inputs.get(model_trainer_name, None) if inputs is not None else None
self._prepare_model_trainer_for_tuning(model_trainer, ins, job_name)
self._prepare_for_tuning(job_name=job_name)
self.latest_tuning_job = self._start_tuning_job(inputs)
def stop_tuning_job(self):
"""Stop latest running hyperparameter tuning job."""
self._ensure_last_tuning_job()
self.latest_tuning_job.stop()
def describe(self):
"""Returns a response from the DescribeHyperParameterTuningJob API call."""
self._ensure_last_tuning_job()
return self.latest_tuning_job.refresh()
def wait(self):
"""Wait for latest hyperparameter tuning job to finish."""
self._ensure_last_tuning_job()
self.latest_tuning_job.wait()
def best_training_job(self):
"""Return name of the best training job for the latest hyperparameter tuning job.
Raises:
Exception: If there is no best training job available for the
hyperparameter tuning job.
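Example (illustrative; the returned job name is a placeholder):
>>> tuner.best_training_job()
'my-tuning-job-001-abc123'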
"""
return self._get_best_training_job()["TrainingJobName"]
def _get_best_training_job(self):
"""Return the best training job for the latest hyperparameter tuning job.
Raises:
Exception: If there is no best training job available for the
hyperparameter tuning job.
"""
self._ensure_last_tuning_job()
# Refresh the tuning job to get latest status
tuning_job = self.latest_tuning_job.refresh()
if tuning_job.best_training_job:
# Convert the best training job to the expected format
best_job = tuning_job.best_training_job
return {
"TrainingJobName": best_job.training_job_name,
"TrainingJobDefinitionName": best_job.training_job_definition_name
or "training-job-definition",
}
else:
raise Exception(
f"Best training job not available for tuning job: {tuning_job.hyper_parameter_tuning_job_name}"
)
def _ensure_last_tuning_job(self):
"""Placeholder docstring"""
if self.latest_tuning_job is None:
raise ValueError("No tuning job available")
@classmethod
def _prepare_init_params_from_job_description(cls, job_details):
"""Placeholder docstring"""
tuning_config = job_details["HyperParameterTuningJobConfig"]
params = {
"strategy": tuning_config["Strategy"],
"max_jobs": tuning_config["ResourceLimits"]["MaxNumberOfTrainingJobs"],
"max_parallel_jobs": tuning_config["ResourceLimits"]["MaxParallelTrainingJobs"],
"warm_start_config": HyperParameterTuningJobWarmStartConfig.from_job_desc(
job_details.get("HyperParameterTuningJobWarmStartConfig", None)
),
"early_stopping_type": tuning_config["TrainingJobEarlyStoppingType"],
"base_tuning_job_name": base_from_name(job_details["HyperParameterTuningJobName"]),
}
if "TuningJobCompletionCriteria" in tuning_config:
params["completion_criteria_config"] = TuningJobCompletionCriteria.from_job_desc(
tuning_config["TuningJobCompletionCriteria"]
)
if MAX_RUNTIME_IN_SECONDS in tuning_config["ResourceLimits"]:
params["max_runtime_in_seconds"] = tuning_config["ResourceLimits"][
MAX_RUNTIME_IN_SECONDS
]
if "RandomSeed" in tuning_config:
params["random_seed"] = tuning_config["RandomSeed"]
if "HyperParameterTuningJobObjective" in tuning_config:
params["objective_metric_name"] = tuning_config["HyperParameterTuningJobObjective"][
"MetricName"
]
params["objective_type"] = tuning_config["HyperParameterTuningJobObjective"]["Type"]
if "ParameterRanges" in tuning_config:
params["hyperparameter_ranges"] = cls._prepare_parameter_ranges_from_job_description(
tuning_config["ParameterRanges"]
)
if "TrainingJobDefinition" in job_details:
params["metric_definitions"] = job_details["TrainingJobDefinition"][
"AlgorithmSpecification"
]["MetricDefinitions"]
if "TrainingJobDefinitions" in job_details:
params["objective_type"] = job_details["TrainingJobDefinitions"][0]["TuningObjective"][
"Type"
]
return params
@classmethod
def _prepare_parameter_ranges_from_job_description(cls, parameter_ranges):
"""Placeholder docstring"""
ranges = {}
for parameter in parameter_ranges["CategoricalParameterRanges"]:
ranges[parameter["Name"]] = CategoricalParameter(parameter["Values"])
for parameter in parameter_ranges["ContinuousParameterRanges"]:
ranges[parameter["Name"]] = ContinuousParameter(
float(parameter["MinValue"]), float(parameter["MaxValue"])
)
for parameter in parameter_ranges["IntegerParameterRanges"]:
ranges[parameter["Name"]] = IntegerParameter(
int(parameter["MinValue"]), int(parameter["MaxValue"])
)
return ranges
@classmethod
def _extract_hyperparameters_from_parameter_ranges(cls, parameter_ranges):
"""Placeholder docstring"""
hyperparameters = {}
for parameter in parameter_ranges["CategoricalParameterRanges"]:
hyperparameters[parameter["Name"]] = parameter["Values"][0]
for parameter in parameter_ranges["ContinuousParameterRanges"]:
hyperparameters[parameter["Name"]] = float(parameter["MinValue"])
for parameter in parameter_ranges["IntegerParameterRanges"]:
hyperparameters[parameter["Name"]] = int(parameter["MinValue"])
return hyperparameters
def hyperparameter_ranges(self):
"""Return the hyperparameter ranges in a dictionary.
Dictionary to be used as part of a request for creating a hyperparameter tuning job.
"""
if self._hyperparameter_ranges is None:
return None
return self._prepare_parameter_ranges_for_tuning(self._hyperparameter_ranges)
def hyperparameter_ranges_dict(self):
"""Return a dictionary of hyperparameter ranges for all model_trainers in ``model_trainer_dict``"""
if self._hyperparameter_ranges_dict is None:
return None
return {
model_trainer_name: self._prepare_parameter_ranges_for_tuning(
self._hyperparameter_ranges_dict[model_trainer_name]
)
for model_trainer_name in sorted(self.model_trainer_dict.keys())
}
@classmethod
def _prepare_parameter_ranges_for_tuning(cls, parameter_ranges):
"""Prepare hyperparameter ranges for tuning"""
processed_parameter_ranges = dict()
for range_type in ParameterRange.__all_types__:
hp_ranges = []
for parameter_name, parameter in parameter_ranges.items():
if parameter is not None and parameter.__name__ == range_type:
# Get tuning range and convert keys to snake_case for v3 Pydantic models
tuning_range = parameter.as_tuning_range(parameter_name)
# Convert PascalCase keys to snake_case
tuning_range_snake = {}
for key, value in tuning_range.items():
# Convert PascalCase to snake_case
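# e.g. "MinValue" -> "min_value", "ScalingType" -> "scaling_type"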
snake_key = "".join(
["_" + c.lower() if c.isupper() else c for c in key]
).lstrip("_")
tuning_range_snake[snake_key] = value
hp_ranges.append(tuning_range_snake)
processed_parameter_ranges[range_type + "ParameterRanges"] = hp_ranges
return processed_parameter_ranges
@property
def sagemaker_session(self):
"""Convenience method for accessing the SageMaker session.
It accesses the :class:`~sagemaker.session.Session` object associated with the
model_trainer used by the ``HyperparameterTuner``.
"""
model_trainer = self.model_trainer
if model_trainer is None:
first_model_trainer_name = sorted(self.model_trainer_dict.keys())[0]
model_trainer = self.model_trainer_dict[first_model_trainer_name]
return model_trainer.sagemaker_session
def analytics(self):
"""An instance of HyperparameterTuningJobAnalytics for this latest tuning job of this tuner.
Analytics olbject gives you access to tuning results summarized into a pandas dataframe.
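Example (illustrative):
>>> results_df = tuner.analytics().dataframe()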
"""
self._ensure_last_tuning_job()
return HyperparameterTuningJobAnalytics(
self.latest_tuning_job.hyper_parameter_tuning_job_name, self.sagemaker_session
)
def _validate_parameter_ranges(self, model_trainer, hyperparameter_ranges):
"""Validate hyperparameter ranges for a model_trainer"""
# ModelTrainer uses a different hyperparameter structure
# Skip validation for now as ModelTrainer handles this internally
def _validate_parameter_range(self, value_hp, parameter_range):
"""Placeholder docstring"""
for (
parameter_range_key,
parameter_range_value,
) in parameter_range.__dict__.items():
if parameter_range_key == "scaling_type":
continue
# Categorical ranges
if isinstance(parameter_range_value, list):
for categorical_value in parameter_range_value:
value_hp.validate(categorical_value)
# Continuous, Integer ranges
else:
value_hp.validate(parameter_range_value)
def transfer_learning_tuner(self, additional_parents=None, model_trainer=None):
"""Creates a new ``HyperparameterTuner``.
Creation is done by copying the request fields from the provided parent
to the new instance of ``HyperparameterTuner``, followed by the addition
of a warm start configuration with the type "TransferLearning" and parents
set to the union of the provided ``additional_parents`` and ``self``.
If a ``model_trainer`` is provided, it replaces the one copied from ``self``.
Examples:
>>> parent_tuner = HyperparameterTuner.attach(tuning_job_name="parent-job-1")
>>> transfer_learning_tuner = parent_tuner.transfer_learning_tuner(
>>> additional_parents={"parent-job-2"})
Later on:
>>> transfer_learning_tuner.tune(inputs={})
Args:
additional_parents (set{str}): Set of additional parents along with
the self to be used in warm starting
model_trainer (sagemaker.train.model_trainer.ModelTrainer): A ModelTrainer object
that has been initialized with the desired configuration. There
does not need to be a training job associated with this
instance.
Returns:
HyperparameterTuner: A ``HyperparameterTuner`` instance that can be used
to launch a transfer learning tuning job.
"""
return self._create_warm_start_tuner(
additional_parents=additional_parents,
warm_start_type=WarmStartTypes.TRANSFER_LEARNING,
model_trainer=model_trainer,
)
def _create_warm_start_tuner(self, additional_parents, warm_start_type, model_trainer=None):
"""Creates a new ``HyperparameterTuner`` with ``HyperParameterTuningJobWarmStartConfig``.
Where type will be equal to ``warm_start_type`` and``parents`` would be equal
to union of ``additional_parents`` and self.
Args:
additional_parents (set{str}): Additional parents along with self,
to be used for warm starting.
warm_start_type (WarmStartTypes): Type of warm start
job.
model_trainer: Optional ``ModelTrainer`` to use in the new tuner instead of
the one copied from ``self``.
Returns:
HyperparameterTuner: Instance with the request
fields copied from ``self``, along with the warm start configuration.
"""
self._ensure_last_tuning_job()
all_parents = {self.latest_tuning_job.hyper_parameter_tuning_job_name}
if additional_parents:
all_parents = all_parents.union(additional_parents)
if self.model_trainer is not None:
return HyperparameterTuner(
model_trainer=model_trainer if model_trainer else self.model_trainer,
objective_metric_name=self.objective_metric_name,
hyperparameter_ranges=self._hyperparameter_ranges,
strategy=self.strategy,
strategy_config=self.strategy_config,
completion_criteria_config=self.completion_criteria_config,
objective_type=self.objective_type,
max_jobs=self.max_jobs,
max_parallel_jobs=self.max_parallel_jobs,
max_runtime_in_seconds=self.max_runtime_in_seconds,
warm_start_config=HyperParameterTuningJobWarmStartConfig(
warm_start_type=warm_start_type, parents=all_parents
),
early_stopping_type=self.early_stopping_type,
random_seed=self.random_seed,
)
if len(self.model_trainer_dict) > 1:
raise ValueError(
"Warm start is not supported currently for tuners with multiple model_trainers"
)
if model_trainer is not None:
model_trainer_name = list(self.model_trainer_dict.keys())[0]
model_trainer_dict = {model_trainer_name: model_trainer}
else:
model_trainer_dict = self.model_trainer_dict
return HyperparameterTuner.create(
model_trainer_dict=model_trainer_dict,
objective_metric_name_dict=self.objective_metric_name_dict,
hyperparameter_ranges_dict=self._hyperparameter_ranges_dict,
metric_definitions_dict=self.metric_definitions_dict,
strategy=self.strategy,
strategy_config=self.strategy_config,
completion_criteria_config=self.completion_criteria_config,
objective_type=self.objective_type,
max_jobs=self.max_jobs,
max_parallel_jobs=self.max_parallel_jobs,
max_runtime_in_seconds=self.max_runtime_in_seconds,
warm_start_config=HyperParameterTuningJobWarmStartConfig(
warm_start_type=warm_start_type, parents=all_parents
),
early_stopping_type=self.early_stopping_type,
random_seed=self.random_seed,
)
@classmethod
def create(
cls,
model_trainer_dict,