Enable linting via ruff #1551

Closed · wants to merge 3 commits
Changes from all commits
13 changes: 13 additions & 0 deletions .github/workflows/lint.yml
@@ -0,0 +1,13 @@
name: pre-commit
on:
  pull_request:
    branches: [main]
  push:
    branches: [main]
jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: pre-commit/[email protected]
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,7 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
  rev: v0.0.291
  hooks:
  - id: ruff
    args:
    - --fix
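
Taken together, the workflow and the hook configuration run ruff (with --fix) through pre-commit on every pull request and push to main. As a rough local equivalent -- assuming the pre-commit package is installed, and purely as an illustration rather than part of this PR -- the same check can be reproduced with:

# Illustrative sketch, not part of this PR: reproduce the CI lint run locally.
# Assumes `pip install pre-commit` has already been done in the environment.
import subprocess

def run_lint() -> int:
    # Register the git hook once, then run every configured hook over the
    # whole tree, which is roughly what pre-commit/[email protected] does in CI.
    subprocess.run(["pre-commit", "install"], check=True)
    return subprocess.run(["pre-commit", "run", "--all-files"]).returncode

if __name__ == "__main__":
    raise SystemExit(run_lint())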
2 changes: 1 addition & 1 deletion aarch64_linux/build_aarch64_wheel.py
@@ -301,7 +301,7 @@ def build_torchvision(host: RemoteHost, *,
    # Remove .so files to force static linking
    host.run_cmd("rm miniforge3/lib/libpng.so miniforge3/lib/libpng16.so miniforge3/lib/libjpeg.so")
    # And patch setup.py to include libz dependency for libpng
-    host.run_cmd(['sed -i -e \'s/image_link_flags\.append("png")/image_link_flags += ["png", "z"]/\' vision/setup.py'])
+    host.run_cmd(['sed -i -e \'s/image_link_flags\\.append("png")/image_link_flags += ["png", "z"]/\' vision/setup.py'])

    build_vars = ""
    if branch == "nightly":
5 changes: 3 additions & 2 deletions analytics/cubinsizes.py
@@ -12,7 +12,7 @@
try:
    from elftools.elf.elffile import ELFFile
except ModuleNotFoundError:
-    print(f'elftools module not found, trying to install it from pip')
+    print('elftools module not found, trying to install it from pip')
    from pip._internal import main as pip_main
    try:
        pip_main(["install", "pyelftools", "--user"])
@@ -106,7 +106,8 @@ def main():
    if os.path.splitext(fname)[1] == '.a':
        with ArFileCtx(fname):
            for fname in os.listdir("."):
-                if not fname.endswith(".o"): continue
+                if not fname.endswith(".o"):
+                    continue
                for section_name in section_names:
                    elf_sizes = compute_cubin_sizes(fname, section_name)
                    dict_add(results[section_name], elf_sizes)
2 changes: 1 addition & 1 deletion analytics/download_count_wheels.py
@@ -140,7 +140,7 @@ def output_results(bytes_cache: dict) -> None:
def download_logs(log_directory: str, since: float):
    dt_now = datetime.now(timezone.utc)
    dt_end = datetime(dt_now.year, dt_now.month, dt_now.day, tzinfo=timezone.utc)
-    dt_start = dt_end - timedelta(days=1, hours=1) # Add 1 hour padding to account for potentially missed logs due to timing
+    dt_start = dt_end - timedelta(days=1, hours=1)  # Add 1 hour padding to account for potentially missed logs due to timing
    for key in tqdm(BUCKET.objects.filter(Prefix='cflogs')):
        remote_fname = key.key
        local_fname = os.path.join(log_directory, remote_fname)
2 changes: 1 addition & 1 deletion analytics/duplicates_analyze.py
@@ -81,7 +81,7 @@ def print_symbols_overlap(libname1: str, libname2: str) -> None:
    sym1 = get_defined_symbols(libname1, verbose=True)
    sym2 = get_defined_symbols(libname2, verbose=True)
    sym1_size = sum(sym1.values())
-    sym2_size = sum(sym2.values())
+    sum(sym2.values())
    sym_overlap = set(sym1.keys()).intersection(set(sym2.keys()))
    overlap_size = sum(sym1[s] for s in sym_overlap)
    if overlap_size == 0:
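
Note on the hunk above: the F841 fix (local variable assigned but never used) drops the sym2_size binding but keeps the call as a bare expression, presumably so that any side effects of the right-hand side are preserved. A minimal illustration of the same pattern, with hypothetical data rather than code from this repository:

# Hypothetical example of the F841-style cleanup applied above.
symbols = {"foo": 8, "bar": 16}

total = sum(symbols.values())   # kept: `total` is read below
sum(symbols.values())           # binding removed, call left in place (harmless for a pure call like sum)
print(f"total symbol size: {total}")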
8 changes: 4 additions & 4 deletions analytics/s3_test_stats_analyze.py
@@ -33,7 +33,7 @@ def _get_latests_git_commit_sha_list(lookback: int):
def _json_to_df(data: Dict[str, Any], granularity: str) -> pd.DataFrame:
    reformed_data = list()
    for fname, fdata in data['files'].items():
-        if granularity == 'file':
+        if granularity == 'file':
            reformed_data.append({
                "job": data['job'],
                "sha": data['sha'],
@@ -42,7 +42,7 @@ def _json_to_df(data: Dict[str, Any], granularity: str) -> pd.DataFrame:
            })
        else:
            for sname, sdata in fdata['suites'].items():
-                if granularity == 'suite':
+                if granularity == 'suite':
                    reformed_data.append({
                        "job": data['job'],
                        "sha": data['sha'],
@@ -140,8 +140,8 @@ def main():
    dataframe = parse_and_export_stats(f'{cache_folder}/test_time/', granularity)
    dataframe.to_pickle(output)



if __name__ == "__main__":
    main()

2 changes: 1 addition & 1 deletion manywheel/build_scripts/ssl-check.py
@@ -30,4 +30,4 @@
    print("...it DIDN'T!!!!!11!!1one!")
    sys.exit(1)
except EXC:
-    print("...it did, yay.")
+    print("...it did, yay.")
15 changes: 15 additions & 0 deletions ruff.toml
@@ -0,0 +1,15 @@
ignore = [
"E501", # line too long
"E402",
"F401",
"F403",
# "W503",
# "W504",
]
line-length = 120
select = [
"E",
"F",
"W",
]
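
The config enables the pycodestyle (E, W) and Pyflakes (F) rule families at a 120-character line limit, while ignoring line-length (E501), import-position (E402) and unused/star-import (F401, F403) findings. A short, purely illustrative sketch of what the selected rules flag and what the ignore list lets through (not code from this repository):

# Illustrative only -- examples of findings covered by this ruff.toml.
import json
import sys  # an import left unused would be F401, which this config ignores

def summarize(sizes: dict) -> None:
    total = sum(sizes.values())
    # `unused = max(sizes.values())` would be F841 (unused local variable), selected via "F"
    # `if total: print(total)` on one line would be E701, selected via "E"
    if total:
        print(json.dumps({"total": total}), file=sys.stderr)
    # a line longer than 120 characters would be E501, but E501 is on the ignore list

summarize({"libtorch.so": 1024, "libc10.so": 256})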

17 changes: 8 additions & 9 deletions test/smoke_test/smoke_test.py
@@ -69,8 +69,7 @@ def check_nightly_binaries_date(package: str) -> None:
    from datetime import datetime, timedelta
    format_dt = '%Y%m%d'

-    torch_str = torch.__version__
-    date_t_str = re.findall("dev\d+", torch.__version__)
+    date_t_str = re.findall(r"dev\d+", torch.__version__)
    date_t_delta = datetime.now() - datetime.strptime(date_t_str[0][3:], format_dt)
    if date_t_delta.days >= NIGHTLY_ALLOWED_DELTA:
        raise RuntimeError(
@@ -81,7 +80,7 @@ def check_nightly_binaries_date(package: str) -> None:
    for module in MODULES:
        imported_module = importlib.import_module(module["name"])
        module_version = imported_module.__version__
-        date_m_str = re.findall("dev\d+", module_version)
+        date_m_str = re.findall(r"dev\d+", module_version)
        date_m_delta = datetime.now() - datetime.strptime(date_m_str[0][3:], format_dt)
        print(f"Nightly date check for {module['name']} version {module_version}")
        if date_m_delta.days > NIGHTLY_ALLOWED_DELTA:
@@ -102,7 +101,7 @@ def test_cuda_runtime_errors_captured() -> None:
        else:
            raise e
    if(cuda_exception_missed):
-        raise RuntimeError( f"Expected CUDA RuntimeError but have not received!")
+        raise RuntimeError( "Expected CUDA RuntimeError but have not received!")

def smoke_test_cuda(package: str, runtime_error_check: str) -> None:
    if not torch.cuda.is_available() and is_cuda_system:
@@ -145,27 +144,27 @@ def smoke_test_conv2d() -> None:

    print("Testing smoke_test_conv2d")
    # With square kernels and equal stride
-    m = nn.Conv2d(16, 33, 3, stride=2)
+    nn.Conv2d(16, 33, 3, stride=2)
    # non-square kernels and unequal stride and with padding
-    m = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2))
+    nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2))
    # non-square kernels and unequal stride and with padding and dilation
    basic_conv = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2), dilation=(3, 1))
    input = torch.randn(20, 16, 50, 100)
-    output = basic_conv(input)
+    basic_conv(input)

    if is_cuda_system:
        print("Testing smoke_test_conv2d with cuda")
        conv = nn.Conv2d(3, 3, 3).cuda()
        x = torch.randn(1, 3, 24, 24).cuda()
        with torch.cuda.amp.autocast():
-            out = conv(x)
+            conv(x)

        supported_dtypes = [torch.float16, torch.float32, torch.float64]
        for dtype in supported_dtypes:
            print(f"Testing smoke_test_conv2d with cuda for {dtype}")
            conv = basic_conv.to(dtype).cuda()
            input = torch.randn(20, 16, 50, 100, device="cuda").type(dtype)
-            output = conv(input)
+            conv(input)

def smoke_test_linalg() -> None:
    print("Testing smoke_test_linalg")
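
The raw-string changes in this file are warning-only fixes: \d is not a recognized escape in a regular Python string literal, so "dev\d+" and r"dev\d+" denote the same pattern and the r prefix simply silences W605 and the interpreter's DeprecationWarning. Likewise, dropping the unused m, out and output bindings keeps the constructor and forward calls, so the smoke test still exercises the same code paths. A quick illustrative check (not part of the PR):

# Illustrative only: the raw-string fix does not change matching behaviour.
import re

assert "dev\\d+" == r"dev\d+"   # identical string contents once escapes are resolved
assert re.findall(r"dev\d+", "2.1.0.dev20230921+cpu") == ["dev20230921"]
print("raw-string pattern matches as before")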
3 changes: 0 additions & 3 deletions tox.ini

This file was deleted.