Skip to content

Commit d0106c7

Browse files
connernilsen authored and facebook-github-bot committed
Sync .pyre_configuration.external files for those that got out of sync with last upgrade (#2294)
Summary: Pull Request resolved: #2294 X-link: pytorch/torchx#943 This downgrades the .pyre_configurations in the following projects to keep their internal and external configurations synced. - beanmachine/beanmachine/ppl - tools/sapp - torchrec - torchx The versions were chosen to match the most recent Pyre version in [pyre-check-nightly](https://pypi.org/project/pyre-check-nightly/#history). Reviewed By: grievejia Differential Revision: D61211101 fbshipit-source-id: a491da274c65d6001d893a0ea559d781f970c9e8
1 parent 924d393 commit d0106c7

File tree

10 files changed

+46
-19
lines changed

10 files changed

+46
-19
lines changed

torchrec/distributed/benchmark/benchmark_utils.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -480,7 +480,9 @@ def fx_script_module(eager_module: torch.nn.Module) -> torch.nn.Module:
480480

481481
sharded_module = _shard_modules(
482482
module=copied_module,
483-
# pyre-ignore [6]
483+
# pyre-fixme[6]: For 2nd argument expected
484+
# `Optional[List[ModuleSharder[Module]]]` but got
485+
# `List[ModuleSharder[Variable[T (bound to Module)]]]`.
484486
sharders=[sharder],
485487
device=device,
486488
plan=plan,
@@ -489,13 +491,14 @@ def fx_script_module(eager_module: torch.nn.Module) -> torch.nn.Module:
489491

490492
if compile_mode == CompileMode.FX_SCRIPT:
491493
return fx_script_module(
492-
# pyre-ignore [6]
494+
# pyre-fixme[6]: For 1st argument expected `Module` but got
495+
# `Optional[Module]`.
493496
sharded_module
494497
if not benchmark_unsharded_module
495498
else module
496499
)
497500
else:
498-
# pyre-ignore [7]
501+
# pyre-fixme[7]: Expected `Module` but got `Optional[Module]`.
499502
return sharded_module if not benchmark_unsharded_module else module
500503

501504

torchrec/distributed/composable/tests/test_embedding.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,9 @@ def _test_sharding( # noqa C901
101101
# pyre-fixme[6]: For 1st argument expected `ProcessGroup` but got
102102
# `Optional[ProcessGroup]`.
103103
env=ShardingEnv.from_process_group(ctx.pg),
104-
# pyre-ignore
104+
# pyre-fixme[6]: For 4th argument expected
105+
# `Optional[List[ModuleSharder[Module]]]` but got
106+
# `List[EmbeddingCollectionSharder]`.
105107
sharders=[sharder],
106108
device=ctx.device,
107109
)

torchrec/distributed/keyed_jagged_tensor_pool.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -668,7 +668,8 @@ def _update_local(
668668
raise NotImplementedError("Inference does not support update")
669669

670670
def _update_preproc(self, values: KeyedJaggedTensor) -> KeyedJaggedTensor:
671-
# pyre-fixme[7]: Expected `Tensor` but got implicit return value of `None`.
671+
# pyre-fixme[7]: Expected `KeyedJaggedTensor` but got implicit return value
672+
# of `None`.
672673
pass
673674

674675

torchrec/distributed/object_pool.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -132,13 +132,15 @@ def input_dist(
132132
# pyre-ignore[2]
133133
**kwargs,
134134
) -> Awaitable[Awaitable[torch.Tensor]]:
135-
# pyre-ignore
135+
# pyre-fixme[7]: Expected `Awaitable[Awaitable[Tensor]]` but got implicit
136+
# return value of `None`.
136137
pass
137138

138139
def compute(self, ctx: ShrdCtx, dist_input: torch.Tensor) -> DistOut:
139-
# pyre-ignore
140+
# pyre-fixme[7]: Expected `DistOut` but got implicit return value of `None`.
140141
pass
141142

142143
def output_dist(self, ctx: ShrdCtx, output: DistOut) -> LazyAwaitable[Out]:
143-
# pyre-ignore
144+
# pyre-fixme[7]: Expected `LazyAwaitable[Variable[Out]]` but got implicit
145+
# return value of `None`.
144146
pass

torchrec/distributed/shards_wrapper.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,9 @@
2727
aten = torch.ops.aten # pyre-ignore[5]
2828

2929

30-
class LocalShardsWrapper(torch.Tensor): # pyre-ignore[13]: pyre is bad at __new__
30+
# pyre-fixme[13]: Attribute `_local_shards` is never initialized.
31+
# pyre-fixme[13]: Attribute `_storage_meta` is never initialized.
32+
class LocalShardsWrapper(torch.Tensor):
3133
"""
3234
A wrapper class to hold local shards of a DTensor.
3335
This class is used largely for checkpointing purposes and implicity subtypes

torchrec/distributed/test_utils/infer_utils.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -862,7 +862,8 @@ def shard_qebc(
862862
quant_model_copy = copy.deepcopy(mi.quant_model)
863863
sharded_model = _shard_modules(
864864
module=quant_model_copy,
865-
# pyre-ignore
865+
# pyre-fixme[6]: For 2nd argument expected
866+
# `Optional[List[ModuleSharder[Module]]]` but got `List[TestQuantEBCSharder]`.
866867
sharders=[sharder],
867868
device=device,
868869
plan=plan,
@@ -912,7 +913,8 @@ def shard_qec(
912913
quant_model_copy = copy.deepcopy(mi.quant_model)
913914
sharded_model = _shard_modules(
914915
module=quant_model_copy,
915-
# pyre-ignore
916+
# pyre-fixme[6]: For 2nd argument expected
917+
# `Optional[List[ModuleSharder[Module]]]` but got `List[TestQuantECSharder]`.
916918
sharders=[sharder],
917919
device=device,
918920
plan=plan,

torchrec/distributed/tests/test_infer_hetero_shardings.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,9 @@ def test_sharder_different_world_sizes_for_qec(self, sharding_device: str) -> No
110110

111111
sharded_model = _shard_modules(
112112
module=non_sharded_model,
113-
# pyre-ignore
113+
# pyre-fixme[6]: For 2nd argument expected
114+
# `Optional[List[ModuleSharder[Module]]]` but got
115+
# `List[QuantEmbeddingCollectionSharder]`.
114116
sharders=[sharder],
115117
device=torch.device(sharding_device),
116118
plan=plan,
@@ -201,7 +203,9 @@ def test_sharder_different_world_sizes_for_qebc(self) -> None:
201203
}
202204
sharded_model = _shard_modules(
203205
module=non_sharded_model,
204-
# pyre-ignore
206+
# pyre-fixme[6]: For 2nd argument expected
207+
# `Optional[List[ModuleSharder[Module]]]` but got
208+
# `List[QuantEmbeddingBagCollectionSharder]`.
205209
sharders=[sharder],
206210
device=torch.device("cpu"),
207211
plan=plan,

torchrec/distributed/tests/test_infer_shardings.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2025,7 +2025,9 @@ def test_sharded_quant_fp_ebc_tw(
20252025

20262026
sharded_model = _shard_modules(
20272027
module=quant_model,
2028-
# pyre-ignore
2028+
# pyre-fixme[6]: For 2nd argument expected
2029+
# `Optional[List[ModuleSharder[Module]]]` but got
2030+
# `List[QuantFeatureProcessedEmbeddingBagCollectionSharder]`.
20292031
sharders=[sharder],
20302032
device=local_device,
20312033
plan=plan,
@@ -2180,7 +2182,9 @@ def test_sharded_quant_fp_ebc_tw_meta(self, compute_device: str) -> None:
21802182

21812183
sharded_model = _shard_modules(
21822184
module=quant_model,
2183-
# pyre-ignore
2185+
# pyre-fixme[6]: For 2nd argument expected
2186+
# `Optional[List[ModuleSharder[Module]]]` but got
2187+
# `List[QuantFeatureProcessedEmbeddingBagCollectionSharder]`.
21842188
sharders=[sharder],
21852189
# shard on meta to simulate device movement from cpu -> meta QFPEBC
21862190
device=torch.device("meta"),

torchrec/distributed/tests/test_infer_utils.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,9 @@ def test_get_tbe_specs_from_sqebc(self) -> None:
103103

104104
sharded_model = _shard_modules(
105105
module=quant_model[0],
106-
# pyre-ignore
106+
# pyre-fixme[6]: For 2nd argument expected
107+
# `Optional[List[ModuleSharder[Module]]]` but got
108+
# `List[TestQuantEBCSharder]`.
107109
sharders=[sharder],
108110
device=device,
109111
plan=plan,
@@ -178,7 +180,9 @@ def test_get_tbe_specs_from_sqec(self) -> None:
178180

179181
sharded_model = _shard_modules(
180182
module=quant_model[0],
181-
# pyre-ignore
183+
# pyre-fixme[6]: For 2nd argument expected
184+
# `Optional[List[ModuleSharder[Module]]]` but got
185+
# `List[TestQuantECSharder]`.
182186
sharders=[sharder],
183187
device=device,
184188
plan=plan,
@@ -256,7 +260,9 @@ def test_get_all_torchrec_modules_for_single_module(self) -> None:
256260

257261
sharded_model = _shard_modules(
258262
module=quant_model[0],
259-
# pyre-ignore
263+
# pyre-fixme[6]: For 2nd argument expected
264+
# `Optional[List[ModuleSharder[Module]]]` but got
265+
# `List[TestQuantEBCSharder]`.
260266
sharders=[sharder],
261267
device=device,
262268
plan=plan,

torchrec/distributed/types.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,8 @@
4444
# other metaclasses (i.e. AwaitableMeta) for customized
4545
# behaviors, as Generic is non-trival metaclass in
4646
# python 3.6 and below
47-
from typing import GenericMeta # pyre-ignore: python 3.6
47+
# pyre-fixme[21]: Could not find name `GenericMeta` in `typing` (stubbed).
48+
from typing import GenericMeta
4849
except ImportError:
4950
# In python 3.7+, GenericMeta doesn't exist as it's no
5051
# longer a non-trival metaclass,

0 commit comments

Comments (0)