Apply ruff autofixes
akx committed Sep 29, 2023
1 parent d8aab60 commit 12a04c9
Showing 7 changed files with 19 additions and 19 deletions.
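For context, and not part of the commit itself: a minimal, self-contained Python sketch (illustrative names and values only) of the fix patterns visible in this diff, roughly what a run along the lines of `ruff check --fix` produces for rules such as W605 (invalid escape sequence), F541 (f-string without placeholders) and F841 (unused local binding), plus splitting compound one-liners.

import re

# W605: regex escapes in a plain string literal become a raw string
# (or the backslash is doubled where a raw string is not an option).
nightly_tag = re.findall(r"dev\d+", "2.2.0.dev20230929+cpu")

# F541: an f-string with no placeholders loses its f prefix.
print("elftools module not found, trying to install it from pip")

# F841: a binding that is never read is dropped; only the expression
# is kept in case it has side effects.
sizes = {"libfoo.so": 4, "libbar.so": 8}
sum(sizes.values())

# Compound one-liners such as `if cond: continue` are split so the
# body sits on its own line.
for fname in ("a.o", "b.txt"):
    if not fname.endswith(".o"):
        continue
    print(fname, nightly_tag)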
2 changes: 1 addition & 1 deletion aarch64_linux/build_aarch64_wheel.py
@@ -301,7 +301,7 @@ def build_torchvision(host: RemoteHost, *,
# Remove .so files to force static linking
host.run_cmd("rm miniforge3/lib/libpng.so miniforge3/lib/libpng16.so miniforge3/lib/libjpeg.so")
# And patch setup.py to include libz dependency for libpng
-host.run_cmd(['sed -i -e \'s/image_link_flags\.append("png")/image_link_flags += ["png", "z"]/\' vision/setup.py'])
+host.run_cmd(['sed -i -e \'s/image_link_flags\\.append("png")/image_link_flags += ["png", "z"]/\' vision/setup.py'])

build_vars = ""
if branch == "nightly":
5 changes: 3 additions & 2 deletions analytics/cubinsizes.py
@@ -12,7 +12,7 @@
try:
from elftools.elf.elffile import ELFFile
except ModuleNotFoundError:
-print(f'elftools module not found, trying to install it from pip')
+print('elftools module not found, trying to install it from pip')
from pip._internal import main as pip_main
try:
pip_main(["install", "pyelftools", "--user"])
@@ -106,7 +106,8 @@ def main():
if os.path.splitext(fname)[1] == '.a':
with ArFileCtx(fname):
for fname in os.listdir("."):
-if not fname.endswith(".o"): continue
+if not fname.endswith(".o"):
+    continue
for section_name in section_names:
elf_sizes = compute_cubin_sizes(fname, section_name)
dict_add(results[section_name], elf_sizes)
2 changes: 1 addition & 1 deletion analytics/download_count_wheels.py
@@ -140,7 +140,7 @@ def output_results(bytes_cache: dict) -> None:
def download_logs(log_directory: str, since: float):
dt_now = datetime.now(timezone.utc)
dt_end = datetime(dt_now.year, dt_now.month, dt_now.day, tzinfo=timezone.utc)
-dt_start = dt_end - timedelta(days=1, hours=1) # Add 1 hour padding to account for potentially missed logs due to timing
+dt_start = dt_end - timedelta(days=1, hours=1)  # Add 1 hour padding to account for potentially missed logs due to timing
for key in tqdm(BUCKET.objects.filter(Prefix='cflogs')):
remote_fname = key.key
local_fname = os.path.join(log_directory, remote_fname)
2 changes: 1 addition & 1 deletion analytics/duplicates_analyze.py
@@ -81,7 +81,7 @@ def print_symbols_overlap(libname1: str, libname2: str) -> None:
sym1 = get_defined_symbols(libname1, verbose=True)
sym2 = get_defined_symbols(libname2, verbose=True)
sym1_size = sum(sym1.values())
-sym2_size = sum(sym2.values())
+sum(sym2.values())
sym_overlap = set(sym1.keys()).intersection(set(sym2.keys()))
overlap_size = sum(sym1[s] for s in sym_overlap)
if overlap_size == 0:
8 changes: 4 additions & 4 deletions analytics/s3_test_stats_analyze.py
@@ -33,7 +33,7 @@ def _get_latests_git_commit_sha_list(lookback: int):
def _json_to_df(data: Dict[str, Any], granularity: str) -> pd.DataFrame:
reformed_data = list()
for fname, fdata in data['files'].items():
-if granularity == 'file':
+if granularity == 'file':
reformed_data.append({
"job": data['job'],
"sha": data['sha'],
@@ -42,7 +42,7 @@ def _json_to_df(data: Dict[str, Any], granularity: str) -> pd.DataFrame:
})
else:
for sname, sdata in fdata['suites'].items():
-if granularity == 'suite':
+if granularity == 'suite':
reformed_data.append({
"job": data['job'],
"sha": data['sha'],
@@ -140,8 +140,8 @@ def main():
dataframe = parse_and_export_stats(f'{cache_folder}/test_time/', granularity)
dataframe.to_pickle(output)



if __name__ == "__main__":
main()

2 changes: 1 addition & 1 deletion manywheel/build_scripts/ssl-check.py
@@ -30,4 +30,4 @@
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.")
print("...it did, yay.")
17 changes: 8 additions & 9 deletions test/smoke_test/smoke_test.py
@@ -69,8 +69,7 @@ def check_nightly_binaries_date(package: str) -> None:
from datetime import datetime, timedelta
format_dt = '%Y%m%d'

-torch_str = torch.__version__
-date_t_str = re.findall("dev\d+", torch.__version__)
+date_t_str = re.findall(r"dev\d+", torch.__version__)
date_t_delta = datetime.now() - datetime.strptime(date_t_str[0][3:], format_dt)
if date_t_delta.days >= NIGHTLY_ALLOWED_DELTA:
raise RuntimeError(
@@ -81,7 +80,7 @@ def check_nightly_binaries_date(package: str) -> None:
for module in MODULES:
imported_module = importlib.import_module(module["name"])
module_version = imported_module.__version__
-date_m_str = re.findall("dev\d+", module_version)
+date_m_str = re.findall(r"dev\d+", module_version)
date_m_delta = datetime.now() - datetime.strptime(date_m_str[0][3:], format_dt)
print(f"Nightly date check for {module['name']} version {module_version}")
if date_m_delta.days > NIGHTLY_ALLOWED_DELTA:
@@ -102,7 +101,7 @@ def test_cuda_runtime_errors_captured() -> None:
else:
raise e
if(cuda_exception_missed):
-raise RuntimeError( f"Expected CUDA RuntimeError but have not received!")
+raise RuntimeError( "Expected CUDA RuntimeError but have not received!")

def smoke_test_cuda(package: str, runtime_error_check: str) -> None:
if not torch.cuda.is_available() and is_cuda_system:
@@ -145,27 +144,27 @@ def smoke_test_conv2d() -> None:

print("Testing smoke_test_conv2d")
# With square kernels and equal stride
-m = nn.Conv2d(16, 33, 3, stride=2)
+nn.Conv2d(16, 33, 3, stride=2)
# non-square kernels and unequal stride and with padding
-m = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2))
+nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2))
# non-square kernels and unequal stride and with padding and dilation
basic_conv = nn.Conv2d(16, 33, (3, 5), stride=(2, 1), padding=(4, 2), dilation=(3, 1))
input = torch.randn(20, 16, 50, 100)
-output = basic_conv(input)
+basic_conv(input)

if is_cuda_system:
print("Testing smoke_test_conv2d with cuda")
conv = nn.Conv2d(3, 3, 3).cuda()
x = torch.randn(1, 3, 24, 24).cuda()
with torch.cuda.amp.autocast():
-out = conv(x)
+conv(x)

supported_dtypes = [torch.float16, torch.float32, torch.float64]
for dtype in supported_dtypes:
print(f"Testing smoke_test_conv2d with cuda for {dtype}")
conv = basic_conv.to(dtype).cuda()
input = torch.randn(20, 16, 50, 100, device="cuda").type(dtype)
-output = conv(input)
+conv(input)

def smoke_test_linalg() -> None:
print("Testing smoke_test_linalg")
