Skip to content

Commit

Permalink
Merge branch 'master' into hotfix/DetectionRandomAffine-fix-target-size
Browse files · Browse the repository at this point in the history
  • Loading branch information
cansik authored Jun 3, 2024
2 parents 1458b07 + af93ec3 commit f2a90f0
Show file tree
Hide file tree
Showing 10 changed files with 20 additions and 17 deletions.
2 changes: 1 addition & 1 deletion src/super_gradients/common/auto_logging/auto_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def _setup_logging(self, filename: str, copy_already_logged_messages: bool, file

if copy_already_logged_messages and self.filename is not None and os.path.exists(self.filename):
with open(self.filename, "r", encoding="utf-8") as src:
with open(filename, "w") as dst:
with open(filename, "w", encoding="utf-8") as dst:
dst.write(src.read())

file_logging_level = log_level or env_variables.FILE_LOG_LEVEL
Expand Down
2 changes: 1 addition & 1 deletion src/super_gradients/common/plugins/deci_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ def load_code_from_zipfile(*, file: str, target_path: str, package_name: str = "
zipfile.extractall(package_path)

# add an init file that imports all code files
with open(os.path.join(package_path, "__init__.py"), "w") as init_file:
with open(os.path.join(package_path, "__init__.py"), "w", encoding="utf-8") as init_file:
all_str = "\n\n__all__ = ["
for code_file in os.listdir(path=package_path):
if code_file.endswith(".py") and not code_file.startswith("__init__"):
Expand Down
4 changes: 2 additions & 2 deletions src/super_gradients/common/sg_loggers/base_sg_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def _init_log_file(self):

@multi_process_safe
def _write_to_log_file(self, lines: list):
with open(self.experiment_log_path, "a" if os.path.exists(self.experiment_log_path) else "w") as log_file:
with open(self.experiment_log_path, "a" if os.path.exists(self.experiment_log_path) else "w", encoding="utf-8") as log_file:
for line in lines:
log_file.write(line + "\n")

Expand Down Expand Up @@ -345,7 +345,7 @@ def _save_code(self):
name = name + ".py"

path = os.path.join(self._local_dir, name)
with open(path, "w") as f:
with open(path, "w", encoding="utf-8") as f:
f.write(code)

self.add_file(name)
Expand Down
2 changes: 1 addition & 1 deletion src/super_gradients/convert_recipe_to_code.py
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ def main():
key_to_replace_with = f"{key}"
content = content.replace(key_to_search, key_to_replace_with)

with open(output_script_path, "w") as f:
with open(output_script_path, "w", encoding="utf-8") as f:
black = try_import_black()
if black is not None:
content = black.format_str(content, mode=black.FileMode(line_length=160))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ def get_coord(box_coord):
xmin, ymin, xmax, ymax = get_coord("xmin"), get_coord("ymin"), get_coord("xmax"), get_coord("ymax")
labels.append(" ".join([xmin, ymin, xmax, ymax, str(PASCAL_VOC_2012_CLASSES_LIST.index(cls))]))

with open(new_label_path, "w") as f:
with open(new_label_path, "w", encoding="utf-8") as f:
f.write("\n".join(labels))

urls = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def precompute_sample_repeat_factors(

str_repeat_factors = [np.format_float_positional(rf, trim="0", precision=4) for rf in repeat_factors]

with open(output_path, "w") as f:
with open(output_path, "w", encoding="utf-8") as f:
json.dump(str_repeat_factors, f)

@staticmethod
Expand Down
17 changes: 10 additions & 7 deletions src/super_gradients/training/sg_trainer/sg_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -679,6 +679,15 @@ def _save_checkpoint(
train_metrics_titles = get_metrics_titles(self.train_metrics)
all_metrics["train"] = {metric_name: float(train_metrics_dict[metric_name]) for metric_name in train_metrics_titles}

best_checkpoint = (curr_tracked_metric > self.best_metric and self.greater_metric_to_watch_is_better) or (
curr_tracked_metric < self.best_metric and not self.greater_metric_to_watch_is_better
)

if best_checkpoint:
# STORE THE CURRENT metric AS BEST
self.best_metric = curr_tracked_metric
self._best_ckpt_metrics = all_metrics

# BUILD THE state_dict
state = {
"net": unwrap_model(self.net).state_dict(),
Expand Down Expand Up @@ -713,13 +722,7 @@ def _save_checkpoint(
self.sg_logger.add_checkpoint(tag=f"ckpt_epoch_{epoch}.pth", state_dict=state, global_step=epoch)

# OVERRIDE THE BEST CHECKPOINT AND best_metric IF metric GOT BETTER THAN THE PREVIOUS BEST
if (curr_tracked_metric > self.best_metric and self.greater_metric_to_watch_is_better) or (
curr_tracked_metric < self.best_metric and not self.greater_metric_to_watch_is_better
):
# STORE THE CURRENT metric AS BEST
self.best_metric = curr_tracked_metric

self._best_ckpt_metrics = all_metrics
if best_checkpoint:
self.sg_logger.add_checkpoint(tag=self.ckpt_best_name, state_dict=state, global_step=epoch)

# RUN PHASE CALLBACKS
Expand Down
2 changes: 1 addition & 1 deletion tests/unit_tests/export_detection_model_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -568,7 +568,7 @@ def manual_test_export_export_all_variants(self):
os.makedirs(export_dir, exist_ok=True)

benchmark_command_dir = "benchmark_command.sh"
with open(benchmark_command_dir, "w") as f:
with open(benchmark_command_dir, "w", encoding="utf-8") as f:
pass

for output_predictions_format in [DetectionOutputFormatMode.BATCH_FORMAT, DetectionOutputFormatMode.FLAT_FORMAT]:
Expand Down
2 changes: 1 addition & 1 deletion tests/unit_tests/pose_estimation_metrics_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def convert_predictions_to_target_format(preds) -> List[PoseEstimationPrediction
with tempfile.TemporaryDirectory() as td:
res_file = os.path.join(td, "keypoints_coco2017_results.json")

with open(res_file, "w") as f:
with open(res_file, "w", encoding="utf-8") as f:
json.dump(coco_pred, f, sort_keys=True, indent=4)

coco_dt = self._load_coco_groundtruth(with_crowd, with_duplicates, with_invisible_keypoitns)
Expand Down
2 changes: 1 addition & 1 deletion utils_script/create_sub_coco.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def _copy_to_new_dir(mode: str, n_images: int, input_data_dir: Path, dest_data_d
dest_images_dir = dest_data_dir / "images" / f"{mode}2017"
dest_images_dir.mkdir(exist_ok=True, parents=True)

with open(dest_instances_path, "w") as f:
with open(dest_instances_path, "w", encoding="utf-8") as f:
json.dump(instances, f)

for image_name in kept_images_name:
Expand Down

0 comments on commit f2a90f0

Please sign in to comment.