Skip to content

Commit ee93cba

Browse files
stroxler authored and facebook-github-bot committed
Bump torchx pyre-check-nightly version (#592)
Summary: Pull Request resolved: #592 Bump pyre-nightly to the latest release, to prevent errors due to error suppressions. To avoid an unnecessary suppression on the `filelock.FileLock`, which Pyre considers to be abstract, add a stub where `BaseFileLock` is not abstract. Reviewed By: pradeep90 Differential Revision: D39139755 fbshipit-source-id: 96cb70813eff347f19b6d3134012c7f8ce034684
1 parent 47464ff commit ee93cba

File tree

3 files changed

+64
-5
lines changed

3 files changed

+64
-5
lines changed

.pyre_configuration

+7-1
Original file line number · Diff line number · Diff line change
@@ -6,10 +6,16 @@
66
".*/IPython/core/tests/nonascii.*",
77
".*/torchx/examples/apps/compute_world_size/.*"
88
],
9+
"ignore_all_errors": [
10+
"./stubs/"
11+
],
912
"site_package_search_strategy": "all",
1013
"source_directories": [
1114
"."
1215
],
16+
"search_path": [
17+
"stubs"
18+
],
1319
"strict": true,
14-
"version": "0.0.101650971427"
20+
"version": "0.0.101662549039"
1521
}

stubs/filelock.pyi

+53
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,53 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

"""
In the actual implementation of `filelock`, the type `BaseFileLock` is marked
as abstract. And Pyre does not allow us to instantiate a variable of `Type[X]`
when `X` is abstract because we cannot be sure that the type is valid to
instantiate.

In reality, the `FileLock` type is always some concrete instantiatable type so
it is okay to use; this stub makes Pyre happy by declaring BaseFileLock as not
abstract.
"""

import types
import typing as t

class Timeout(TimeoutError):
    # Path of the lock file that could not be acquired before the deadline.
    lock_file: str
    def __init__(self, lock_file: str) -> None: ...
    def __str__(self) -> str: ...

class BaseFileLock:
    # Declared WITHOUT abstract methods on purpose: the whole point of this
    # stub is that Pyre sees `BaseFileLock` as concrete (see module docstring).
    def __init__(self, lock_file: str, timeout: float = -1) -> None: ...
    @property
    def lock_file(self) -> str: ...
    @property
    def timeout(self) -> float: ...
    @timeout.setter
    def timeout(self, value: float) -> None: ...
    @property
    def is_locked(self) -> bool: ...
    def acquire(
        self, timeout: t.Optional[float] = None, poll_intervall: float = 0.05
    ) -> t.Any: ...
    def release(self, force: bool = False) -> None: ...
    # Forward reference quoted so the annotation also evaluates cleanly if
    # this module is ever executed as plain Python (the class name is not yet
    # bound while the class body is being executed).
    def __enter__(self) -> "BaseFileLock": ...
    def __exit__(
        self,
        exc_type: t.Optional[type],
        exc_value: t.Optional[Exception],
        traceback: t.Optional[types.TracebackType],
    ) -> None: ...
    def __del__(self) -> None: ...

class WindowsFileLock(BaseFileLock): ...
class UnixFileLock(BaseFileLock): ...
class SoftFileLock(BaseFileLock): ...

# At runtime `filelock.FileLock` is whichever platform-appropriate concrete
# subclass applies; for typing purposes it is any instantiatable BaseFileLock.
FileLock: t.Type[BaseFileLock]

torchx/schedulers/test/train.py

+4-4
Original file line number · Diff line number · Diff line change
@@ -21,19 +21,19 @@ def compute_world_size() -> int:
2121
backend = "gloo"
2222

2323
print(f"initializing `{backend}` process group")
24-
init_process_group( # pyre-ignore[16]
24+
init_process_group(
2525
backend=backend,
2626
init_method=f"tcp://{master_addr}:{master_port}",
2727
rank=rank,
2828
world_size=world_size,
2929
)
3030
print("successfully initialized process group")
3131

32-
rank = get_rank() # pyre-ignore[16]
33-
world_size = get_world_size() # pyre-ignore[16]
32+
rank = get_rank()
33+
world_size = get_world_size()
3434

3535
t = F.one_hot(torch.tensor(rank), num_classes=world_size)
36-
all_reduce(t) # pyre-ignore[16]
36+
all_reduce(t)
3737
computed_world_size = int(torch.sum(t).item())
3838
print(
3939
f"rank: {rank}, actual world_size: {world_size}, computed world_size: {computed_world_size}"

0 commit comments

Comments
 (0)