
Commit 5ed301a

update 'pytorch_config'

Signed-off-by: helenxie-bit <[email protected]>
1 parent 2a4e3d3

File tree

1 file changed: +12 additions, −11 deletions

docs/proposals/llm-hyperparameter-optimization-api.md — 12 additions, 11 deletions
```diff
@@ -53,10 +53,10 @@ class KatibClient(object):
     max_trial_count: int = None,
     parallel_trial_count: int = None,
     max_failed_trial_count: int = None,
-    resources_per_trial: Union[dict, client.V1ResourceRequirements, None] = None,
-    pytorch_config=katib.PyTorchConfig(
+    pytorch_config = katib.PyTorchConfig(
         num_workers: int = 1,
         num_procs_per_worker: int = 1,
+        resources_per_worker: Union[dict, client.V1ResourceRequirements, None] = None,
     ),
     retain_trials: bool = False,
     env_per_trial: Optional[Union[Dict[str, str], List[Union[client.V1EnvVar, client.V1EnvFromSource]]]] = None,
@@ -81,8 +81,7 @@ class KatibClient(object):
     - max_trial_count: Maximum number of trials to run.
     - parallel_trial_count: Number of trials to run in parallel.
     - max_failed_trial_count: Maximum number of allowed failed trials.
-    - resources_per_trial: Resources required per trial.
-    - pytorch_config: Configuration for PyTorch jobs, including number of workers and processes per worker.
+    - pytorch_config: Configuration for PyTorch jobs, including number of workers, processes per worker and resources per worker.
     - retain_trials: Whether to retain trial resources after completion.
     - env_per_trial: Environment variables for worker containers.
     - packages_to_install: Additional Python packages to install.
@@ -149,13 +148,15 @@ katib_client.tune(
     algorithm_name = "random",
     max_trial_count = 50,
     parallel_trial_count = 2,
-    resources_per_trial = {
-        "gpu": 8,
-        "cpu": 20,
-        "memory": "40G",
-    },
-    num_workers = 4,
-    num_procs_per_worker = 2,
+    pytorch_config = katib.PyTorchConfig(
+        num_workers = 4,
+        num_procs_per_worker = 2,
+        resources_per_worker = {
+            "gpu": 2,
+            "cpu": 5,
+            "memory": "10G",
+        },
+    ),
 )
 
 # Get the best hyperparameters
```
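
For readers skimming the diff, here is a minimal, hypothetical sketch of what the proposed `PyTorchConfig` container could look like as a plain Python dataclass. The field names and defaults mirror the diff above; the concrete implementation is an assumption for illustration, not actual Katib SDK code.

```python
# Hypothetical sketch only: models the proposed PyTorchConfig as a dataclass.
# Field names and defaults follow the diff above; Katib's real SDK may differ.
from dataclasses import dataclass
from typing import Optional, Union

from kubernetes import client  # assumes the kubernetes Python client is installed


@dataclass
class PyTorchConfig:
    # Number of PyTorchJob worker replicas launched for each trial.
    num_workers: int = 1
    # Number of processes started on each worker (e.g. one per GPU).
    num_procs_per_worker: int = 1
    # Compute resources requested by each worker, either as a plain dict
    # such as {"gpu": 2, "cpu": 5, "memory": "10G"} or a V1ResourceRequirements.
    resources_per_worker: Optional[Union[dict, client.V1ResourceRequirements]] = None
```

Compared with the old `resources_per_trial` argument, the resource request now scales with `num_workers`: the updated example asks for 2 GPUs, 5 CPUs, and 10G of memory per worker, which across 4 workers matches the previous per-trial total of 8 GPUs, 20 CPUs, and 40G.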
