Commit 865cf55

Fix train_pipeline test (#2299)
Summary:
Pull Request resolved: #2299

Test is flaky: https://www.internalfb.com/intern/test/844425089856635?ref_report_id=0. The order of the preproc modules is not deterministic, even though the test asserted a fixed order. This change removes the nondeterminism from the test case.

Reviewed By: sarckk

Differential Revision: D61299548

fbshipit-source-id: 46475f81077769000fd731c9e23fc7c84e3fd54f
1 parent 306057a commit 865cf55

File tree

1 file changed: +7 −1


torchrec/distributed/train_pipeline/tests/test_train_pipelines.py

Lines changed: 7 additions & 1 deletion
@@ -973,10 +973,16 @@ def test_pipeline_preproc_not_shared_with_arg_transform(self) -> None:
 
         # preproc args
         self.assertEqual(len(pipeline._pipelined_preprocs), 2)
-        for i, input_attr_name in [(0, "idlist_features"), (1, "idscore_features")]:
+        input_attr_names = {"idlist_features", "idscore_features"}
+        for i in range(len(pipeline._pipelined_preprocs)):
             preproc_mod = pipeline._pipelined_preprocs[i]
             self.assertEqual(len(preproc_mod._args), 1)
+
+            input_attr_name = preproc_mod._args[0].input_attrs[1]
+            self.assertTrue(input_attr_name in input_attr_names)
             self.assertEqual(preproc_mod._args[0].input_attrs, ["", input_attr_name])
+            input_attr_names.remove(input_attr_name)
+
             self.assertEqual(preproc_mod._args[0].is_getitems, [False, False])
             # no parent preproc module in FX graph
             self.assertEqual(preproc_mod._args[0].preproc_modules, [None, None])
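
For context, here is a minimal, self-contained sketch of the order-independent assertion pattern applied in this diff: instead of asserting a fixed (index, attribute-name) mapping, each expected name is consumed from a set, so the check passes no matter which order the pipeline registers the preproc modules in. The _Preproc and _Arg classes and the test name below are hypothetical stand-ins, not TorchRec APIs; only the assertion logic mirrors the change.

import unittest


class _Arg:
    # Hypothetical stand-in for a pipelined call-arg record; only
    # ``input_attrs`` is modeled here.
    def __init__(self, input_attrs):
        self.input_attrs = input_attrs


class _Preproc:
    # Hypothetical stand-in for a pipelined preproc module with one arg.
    def __init__(self, input_attrs):
        self._args = [_Arg(input_attrs)]


class OrderIndependentAssertionSketch(unittest.TestCase):
    def test_preproc_args_in_any_order(self) -> None:
        # The pipeline may register these modules in either order, which is
        # the nondeterminism that made the original test flaky.
        pipelined_preprocs = [
            _Preproc(["", "idscore_features"]),
            _Preproc(["", "idlist_features"]),
        ]

        # Consume each expected name from a set instead of asserting a fixed
        # index -> name mapping; any ordering passes, duplicates cannot.
        input_attr_names = {"idlist_features", "idscore_features"}
        for preproc_mod in pipelined_preprocs:
            self.assertEqual(len(preproc_mod._args), 1)
            input_attr_name = preproc_mod._args[0].input_attrs[1]
            self.assertTrue(input_attr_name in input_attr_names)
            self.assertEqual(
                preproc_mod._args[0].input_attrs, ["", input_attr_name]
            )
            input_attr_names.remove(input_attr_name)


if __name__ == "__main__":
    unittest.main()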
