remove resources per trial
Signed-off-by: Jack Luar <[email protected]>
luarss committed Sep 12, 2024
1 parent 431095d commit 2757611
Showing 2 changed files with 2 additions and 9 deletions.
tools/AutoTuner/src/autotuner/distributed.py (1 addition, 8 deletions)
@@ -750,13 +750,6 @@ def parse_arguments():
         default=1,
         help="Number of iterations for tuning.",
     )
-    tune_parser.add_argument(
-        "--resources_per_trial",
-        type=float,
-        metavar="<float>",
-        default=1,
-        help="Number of CPUs to request for each tunning job.",
-    )
     tune_parser.add_argument(
         "--reference",
         type=str,
@@ -1018,7 +1011,7 @@ def sweep():
         local_dir=LOCAL_DIR,
         resume=args.resume,
         stop={"training_iteration": args.iterations},
-        resources_per_trial={"cpu": args.resources_per_trial},
+        resources_per_trial={"cpu": os.cpu_count()/args.jobs},
         log_to_file=["trail-out.log", "trail-err.log"],
         trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
         trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
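Note on the hunk above: instead of taking a user-supplied --resources_per_trial value, the tune step now gives every trial an equal CPU share computed from the host core count and the --jobs argument. Below is a minimal, self-contained sketch of that allocation scheme using the same legacy tune.run API visible in the diff; the trainable, the search space, and the jobs value are placeholders invented for illustration, and only the resources_per_trial derivation mirrors the added line in distributed.py.

import os

from ray import tune


def trainable(config):
    # Placeholder objective; AutoTuner's real trainable evaluates an ORFS flow.
    tune.report(score=config["x"] ** 2)


jobs = 4  # stands in for args.jobs; assumed value for illustration
cpus_per_trial = os.cpu_count() / jobs  # same derivation as the added line

analysis = tune.run(
    trainable,
    config={"x": tune.uniform(-1.0, 1.0)},
    num_samples=jobs,
    # Each trial requests an equal slice of the machine, so `jobs` trials
    # can run concurrently without oversubscribing the CPUs.
    resources_per_trial={"cpu": cpus_per_trial},
    stop={"training_iteration": 1},
)

Because the division yields a float, values below 1.0 act as fractional CPU reservations in Ray, so requesting more jobs than physical cores remains schedulable.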
tools/AutoTuner/test/resume_check.py (1 addition, 1 deletion)
@@ -57,7 +57,7 @@ def setUp(self):
f" {c}"
for c in options
]
self.failCommands = [] # TODO


def test_tune_resume(self):
# Goal is to first run the first config (without resume) and then run the second config (with resume)
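With the flag gone, test invocations only pass the remaining options, and the per-trial CPU budget follows --jobs automatically. A hypothetical command assembly in the spirit of the options loop above; the entry point and flag values are illustrative and not taken from resume_check.py:

base_command = "python3 distributed.py tune"  # assumed entry point for the tune subcommand
options = [
    "--iterations 1 --jobs 2",            # plain run
    "--iterations 1 --jobs 2 --resume",   # resumed run
]
commands = [base_command + f" {c}" for c in options]
# e.g. "python3 distributed.py tune --iterations 1 --jobs 2 --resume"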
