From 7ef78f0db9f3d35c5237bcc2b867b81ba95ae4c5 Mon Sep 17 00:00:00 2001
From: ehddnr301
Date: Wed, 6 Oct 2021 15:07:29 +0900
Subject: [PATCH] Add isort precommit
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Added isort to the pre-commit hooks as well. The isort options in the
new pyproject.toml are chosen to match black's wrapping style, so the
two hooks do not undo each other's formatting (a before/after example
follows the diff).
---
 .pre-commit-config.yaml           |  5 ++++
 app/api/router/predict.py         |  6 ++---
 app/api/router/train.py           | 26 +++++++++++++-----
 app/database.py                   |  3 +--
 app/models.py                     | 13 ++++++---
 app/query.py                      |  2 +-
 app/utils.py                      | 45 +++++++++++++++++++++++--------
 experiments/atmos_tmp_01/train.py | 18 ++++++-------
 experiments/expr_db.py            |  3 ++-
 experiments/insurance/trial.py    | 12 ++++++---
 logger.py                         |  2 +-
 main.py                           |  9 ++++---
 pyproject.toml                    | 10 +++++++
 13 files changed, 111 insertions(+), 43 deletions(-)
 create mode 100644 pyproject.toml

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3cb791c..0c2da5d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,3 +4,8 @@ repos:
     hooks:
       - id: black
         language_version: python3
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.6.4
+    hooks:
+      - id: isort
+        language_version: python3
\ No newline at end of file
diff --git a/app/api/router/predict.py b/app/api/router/predict.py
index 4c76874..4a8c74f 100644
--- a/app/api/router/predict.py
+++ b/app/api/router/predict.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 from typing import List
-
 
 import numpy as np
 from fastapi import APIRouter
 from starlette.concurrency import run_in_threadpool
@@ -12,12 +11,13 @@
 from app.utils import ScikitLearnModel, my_model
 from logger import L
-
 
 models.Base.metadata.create_all(bind=engine)
 
 
 router = APIRouter(
-    prefix="/predict", tags=["predict"], responses={404: {"description": "Not Found"}}
+    prefix="/predict",
+    tags=["predict"],
+    responses={404: {"description": "Not Found"}},
 )
diff --git a/app/api/router/train.py b/app/api/router/train.py
index 5fda7b8..22b0daf 100644
--- a/app/api/router/train.py
+++ b/app/api/router/train.py
@@ -3,14 +3,21 @@
 import re
 import subprocess
-
 
 from fastapi import APIRouter
-from app.utils import NniWatcher, ExperimentOwl, base_dir, get_free_port, write_yml
+from app.utils import (
+    ExperimentOwl,
+    NniWatcher,
+    base_dir,
+    get_free_port,
+    write_yml,
+)
 from logger import L
 
 router = APIRouter(
-    prefix="/train", tags=["train"], responses={404: {"description": "Not Found"}}
+    prefix="/train",
+    tags=["train"],
+    responses={404: {"description": "Not Found"}},
 )
@@ -44,14 +51,18 @@ def train_insurance(
     try:
         write_yml(path, experiment_name, experimenter, model_name, version)
         nni_create_result = subprocess.getoutput(
-            "nnictl create --port {} --config {}/{}.yml".format(PORT, path, model_name)
+            "nnictl create --port {} --config {}/{}.yml".format(
+                PORT, path, model_name
+            )
         )
         sucs_msg = "Successfully started experiment!"
 
         if sucs_msg in nni_create_result:
             p = re.compile(r"The experiment id is ([a-zA-Z0-9]+)\n")
             expr_id = p.findall(nni_create_result)[0]
-            nni_watcher = NniWatcher(expr_id, experiment_name, experimenter, version)
+            nni_watcher = NniWatcher(
+                expr_id, experiment_name, experimenter, version
+            )
             m_process = multiprocessing.Process(target=nni_watcher.excute)
             m_process.start()
@@ -68,6 +79,7 @@ def train_atmos(expr_name: str):
     """
     API for running training related to the temperature time series.
+
     Args:
         expr_name(str): Name of the experiment for NNI to run. Based on this
             parameter, it looks under project_dir/experiments/[expr_name]
            and launches NNI using that directory's config.yml.
@@ -83,7 +95,9 @@ def train_atmos(expr_name: str):
     try:
         nni_create_result = subprocess.getoutput(
-            "nnictl create --port {} --config {}/config.yml".format(nni_port, expr_path)
+            "nnictl create --port {} --config {}/config.yml".format(
+                nni_port, expr_path
+            )
         )
         sucs_msg = "Successfully started experiment!"
diff --git a/app/database.py b/app/database.py
index cc2bcf7..4a4f535 100644
--- a/app/database.py
+++ b/app/database.py
@@ -1,10 +1,9 @@
 import os
-
 
 from dotenv import load_dotenv
 from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker
 from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
 
 
 load_dotenv(verbose=True)
diff --git a/app/models.py b/app/models.py
index efd76e5..5abb93d 100644
--- a/app/models.py
+++ b/app/models.py
@@ -1,10 +1,17 @@
 # -*- coding: utf-8 -*-
 import datetime
-
 
-from sqlalchemy import Column, Integer, String, FLOAT, DateTime, ForeignKey, LargeBinary
-from sqlalchemy.sql.functions import now
+from sqlalchemy import (
+    FLOAT,
+    Column,
+    DateTime,
+    ForeignKey,
+    Integer,
+    LargeBinary,
+    String,
+)
 from sqlalchemy.orm import relationship
+from sqlalchemy.sql.functions import now
 
 from app.database import Base
diff --git a/app/query.py b/app/query.py
index 33f6e9b..744dd0a 100644
--- a/app/query.py
+++ b/app/query.py
@@ -99,4 +99,4 @@
 WHERE NOT EXISTS (SELECT 1
     FROM atmos_model_metadata as amm
     WHERE amm.model_name = '{mn}');
-"""
\ No newline at end of file
+"""
diff --git a/app/utils.py b/app/utils.py
index 57fa070..340571e 100644
--- a/app/utils.py
+++ b/app/utils.py
@@ -11,7 +11,6 @@
 import time
 import zipfile
-
 
 import tensorflow as tf
 import yaml
@@ -239,7 +238,9 @@ def watch_process(self):
         if self.is_kill:
             while True:
                 self.get_running_experiment()
-                if self._running_experiment and ("DONE" in self._running_experiment[0]):
+                if self._running_experiment and (
+                    "DONE" in self._running_experiment[0]
+                ):
                     _stop_expr = subprocess.getoutput(
                         "nnictl stop {}".format(self.experiment_id)
                     )
@@ -284,7 +285,9 @@ def model_final_update(self):
         if saved_result is None:
             engine.execute(
-                INSERT_MODEL_CORE.format(final_result.model_name, pickled_model)
+                INSERT_MODEL_CORE.format(
+                    final_result.model_name, pickled_model
+                )
             )
             engine.execute(
                 INSERT_MODEL_METADATA.format(
@@ -303,7 +306,9 @@ def model_final_update(self):
                 > final_result[self.evaluation_criteria]
             ):
                 engine.execute(
-                    UPDATE_MODEL_CORE.format(pickled_model, saved_result.model_name)
+                    UPDATE_MODEL_CORE.format(
+                        pickled_model, saved_result.model_name
+                    )
                 )
                 engine.execute(
                     UPDATE_MODEL_METADATA.format(
@@ -315,7 +320,9 @@ def model_final_update(self):
                     )
                 )
 
-        engine.execute(DELETE_ALL_EXPERIMENTS_BY_EXPR_NAME.format(self.experiment_name))
+        engine.execute(
+            DELETE_ALL_EXPERIMENTS_BY_EXPR_NAME.format(self.experiment_name)
+        )
 
 
 def zip_model(model_path):
@@ -401,7 +408,12 @@ class ExperimentOwl:
     """
 
     def __init__(
-        self, experiment_id, experiment_name, experiment_path, mfile_manage=True, time=5
+        self,
+        experiment_id,
+        experiment_name,
+        experiment_path,
+        mfile_manage=True,
+        time=5,
     ):
         self.__minute = 60
         self.time = time * self.__minute
@@ -434,7 +446,9 @@ def main(self):
             expr_list = subprocess.getoutput("nnictl experiment list")
             running_expr = [
-                expr for expr in expr_list.split("\n") if self.experiment_id in expr
+                expr
+                for expr in expr_list.split("\n")
+                if self.experiment_id in expr
             ]
             print(running_expr)
             if running_expr and ("DONE" in running_expr[0]):
@@ -486,17 +500,23 @@ def update_tfmodeldb(self):
             if not saved_score or (metrics[0] < saved_score[0]):
                 winner_model = os.path.join(
-                    os.path.join(self.experiment_path, "temp", self.experiment_name)
+                    os.path.join(
+                        self.experiment_path, "temp", self.experiment_name
+                    )
                 )
                 if os.path.exists:
                     shutil.rmtree(winner_model)
                 os.rename(exprs, winner_model)
                 m_buffer = zip_model(winner_model)
-                encode_model = codecs.encode(pickle.dumps(m_buffer), "base64").decode()
+                encode_model = codecs.encode(
+                    pickle.dumps(m_buffer), "base64"
+                ).decode()
                 engine.execute(
-                    INSERT_OR_UPDATE_MODEL.format(mn=self.experiment_name, mf=encode_model)
+                    INSERT_OR_UPDATE_MODEL.format(
+                        mn=self.experiment_name, mf=encode_model
+                    )
                 )
                 engine.execute(
                     INSERT_OR_UPDATE_SCORE.format(
                         score1=metrics[0],
                         score2=metrics[1],
                     )
                 )
-                L.info("saved model %s %s" % (self.experiment_id, self.experiment_name))
+                L.info(
+                    "saved model %s %s"
+                    % (self.experiment_id, self.experiment_name)
+                )
 
     def modelfile_cleaner(self):
         """
diff --git a/experiments/atmos_tmp_01/train.py b/experiments/atmos_tmp_01/train.py
index 4a541c0..89bfb4e 100644
--- a/experiments/atmos_tmp_01/train.py
+++ b/experiments/atmos_tmp_01/train.py
@@ -1,23 +1,21 @@
 import os
 import sys
 import time
+
 from preprocessing import preprocess
 
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
-import numpy as np
 import nni
+import numpy as np
 import pandas as pd
 import tensorflow as tf
+from expr_db import connect
+from sklearn.metrics import mean_absolute_error, mean_squared_error
 from tensorflow import keras
-from tensorflow.keras.models import Sequential
-from tensorflow.keras.layers import Dense
 from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
-from tensorflow.keras.layers import GRU
-from sklearn.metrics import mean_absolute_error, mean_squared_error
-
-
-from expr_db import connect
+from tensorflow.keras.layers import GRU, Dense
+from tensorflow.keras.models import Sequential
 
 physical_devices = tf.config.list_physical_devices("GPU")
 if physical_devices:
@@ -32,7 +30,9 @@ def make_dataset(data, label, window_size=365, predsize=None):
         for i in range(len(data) - (window_size + predsize)):
             feature_list.append(np.array(data.iloc[i : i + window_size]))
             label_list.append(
-                np.array(label.iloc[i + window_size : i + window_size + predsize])
+                np.array(
+                    label.iloc[i + window_size : i + window_size + predsize]
+                )
             )
     else:
         for i in range(len(data) - window_size):
diff --git a/experiments/expr_db.py b/experiments/expr_db.py
index fc6ae08..2a4542b 100644
--- a/experiments/expr_db.py
+++ b/experiments/expr_db.py
@@ -1,6 +1,7 @@
 import os
-from dotenv import load_dotenv
+
 import sqlalchemy
+from dotenv import load_dotenv
 
 
 def connect(db="postgres"):
diff --git a/experiments/insurance/trial.py b/experiments/insurance/trial.py
index 6c61747..0ecbfe4 100644
--- a/experiments/insurance/trial.py
+++ b/experiments/insurance/trial.py
@@ -4,7 +4,6 @@
 import pickle
 import sys
-
 
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
@@ -41,7 +40,9 @@ def preprocess(x_train, x_valid, col_list):
     encoder = LabelEncoder()
 
     for col in col_list:
-        tmp_x_train.loc[:, col] = encoder.fit_transform(tmp_x_train.loc[:, col])
+        tmp_x_train.loc[:, col] = encoder.fit_transform(
+            tmp_x_train.loc[:, col]
+        )
         tmp_x_valid.loc[:, col] = encoder.transform(tmp_x_valid.loc[:, col])
 
     return tmp_x_train.values, tmp_x_valid.values
@@ -87,7 +88,12 @@ def main(params, engine, experiment_info, connection):
     model = XGBRegressor(**params)
 
     # Train the model, applying early stopping
-    model.fit(x_tra, y_train, eval_set=[(x_val, y_valid)], early_stopping_rounds=10)
+    model.fit(
+        x_tra,
+        y_train,
+        eval_set=[(x_val, y_valid)],
+        early_stopping_rounds=10,
+    )
 
     y_train_pred = model.predict(x_tra)
     y_valid_pred = model.predict(x_val)
diff --git a/logger.py b/logger.py
index b3f056d..8514f1a 100644
--- a/logger.py
+++ b/logger.py
@@ -1,7 +1,7 @@
 import logging
 import logging.handlers
-from colorlog import ColoredFormatter
 
+from colorlog import ColoredFormatter
 
 L = logging.getLogger("snowdeer_log")
 L.setLevel(logging.DEBUG)
diff --git a/main.py b/main.py
index 9e40cab..d0f3693 100644
--- a/main.py
+++ b/main.py
@@ -1,7 +1,6 @@
+import uvicorn
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
-import uvicorn
-
 from app.api.router import predict, train
@@ -28,5 +27,9 @@ def hello_world():
 
 if __name__ == "__main__":
     uvicorn.run(
-        "main:app", host="0.0.0.0", port=8000, reload=True, reload_dirs=["app/"]
+        "main:app",
+        host="0.0.0.0",
+        port=8000,
+        reload=True,
+        reload_dirs=["app/"],
     )
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..070c53c
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,10 @@
+[tool.isort]
+multi_line_output=3
+include_trailing_comma=true
+force_grid_wrap=0
+use_parentheses=true
+line_length=79
+
+[tool.black]
+line-length = 79
+target-version = ['py38']
\ No newline at end of file
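
Note (appended for review, not part of the applied patch): the isort
options in pyproject.toml (multi_line_output = 3, include_trailing_comma,
use_parentheses, force_grid_wrap = 0) select the "vertical hanging indent"
style, which is the same wrapping black produces, and line_length = 79
matches [tool.black]'s line-length. The effect on one import touched by
this diff:

    # before
    from app.utils import NniWatcher, ExperimentOwl, base_dir, get_free_port, write_yml

    # after isort
    from app.utils import (
        ExperimentOwl,
        NniWatcher,
        base_dir,
        get_free_port,
        write_yml,
    )

To reproduce locally, `pre-commit install` registers the hooks and
`pre-commit run --all-files` applies black and isort to the whole tree.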