Skip to content

Commit 7384a76

Browse files
todor-markov and rachellim
authored and committed
update fine_tunes cli use_packing argument (#38)
1 parent 71c347d commit 7384a76

File tree

1 file changed

+13
-6
lines changed

1 file changed

+13
-6
lines changed

openai/cli.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -287,8 +287,7 @@ def create(cls, args):
287287
create_args["batch_size"] = args.batch_size
288288
if args.learning_rate_multiplier:
289289
create_args["learning_rate_multiplier"] = args.learning_rate_multiplier
290-
if args.use_packing:
291-
create_args["use_packing"] = args.use_packing
290+
create_args["use_packing"] = args.use_packing
292291
if args.prompt_loss_weight:
293292
create_args["prompt_loss_weight"] = args.prompt_loss_weight
294293
if args.compute_classification_metrics:
@@ -646,13 +645,21 @@ def help(args):
646645
)
647646
sub.add_argument(
648647
"--use_packing",
649-
type=bool,
650-
help="On classification tasks, we recommend setting this to `false`. "
651-
"On all other tasks, we recommend setting it to `true`. "
652-
"When `true`, we pack as many prompt-completion pairs as possible into each "
648+
action="store_true",
649+
dest="use_packing",
650+
help="On classification tasks, we recommend not setting this flag. "
651+
"On all other tasks, we recommend setting it. "
652+
"When set, we pack as many prompt-completion pairs as possible into each "
653653
"training example. This greatly increases the speed of a fine-tuning job, "
654654
"often without negatively affecting model performance.",
655655
)
656+
sub.add_argument(
657+
"--no_packing",
658+
action="store_false",
659+
dest="use_packing",
660+
help="Disables the packing flag (see --use_packing for description)",
661+
)
662+
sub.set_defaults(use_packing=True)
656663
sub.add_argument(
657664
"--prompt_loss_weight",
658665
type=float,

0 commit comments

Comments
 (0)