Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Split Dataset module #2

Merged
merged 4 commits into from
Aug 9, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,16 @@ The train script creates a new directory for each execution using the date.
## Pipeline architecture

* 📁 pipeline/
* 📁 datasets/
* 📄 cityscapes.py
* 📄 minicity.py
* 📄 tunnel.py
* 📁 helpers/
* 📄 ARGS.py
* 📄 helpers.py
* 📄 labels.py
* 📁 learning/
* 📄 cityscapes.py
* 📄 minicity.py
* 📄 learner.py
* 📄 tunnel.py
* 📄 utils.py
* 📁 models/
* 📁 configs/
Expand Down
2 changes: 1 addition & 1 deletion create_labels.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from PIL import ImageDraw
import matplotlib.pyplot as plt

from learning.tunnel import classToVal
from datasets.tunnel import classToVal

# Dataset tree must be like :
#
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion helpers/ARGS.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,5 @@ def __init__(self, model, dataset_path, num_classes, labels_type, batch_size=8,
self.epochs = epochs
self.save_path = ""
self.is_pytorch_model = True
self.num_classes = 30
self.num_classes = num_classes
self.labels_type = labels_type
6 changes: 3 additions & 3 deletions inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,16 @@
from PIL import Image

from helpers.ARGS import ARGS
from learning.tunnel import Tunnel
from models.utils import get_model
from helpers.helpers import vislbl
from datasets.tunnel import Tunnel
from models.utils import get_model

def main():
# Get tunnel dataset
Dataset = Tunnel

# Set up execution arguments
args = ARGS("DenseASPP121", "tunnel", len(Dataset.validClasses), labels_type="csv", batch_size=2, epochs=10)
args = ARGS("DenseASPP121", "tunnel", len(Dataset.validClasses), labels_type="csv", batch_size=2, epochs=2)

model, args = get_model(args)

Expand Down
10 changes: 5 additions & 5 deletions learning/learner.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@


# Function used to train the model on one epoch
def train_epoch(dataloader, model, criterion, optimizer, lr_scheduler, epoch, void=-1, args=None):
def train_epoch(dataloader, model, criterion, optimizer, lr_scheduler, epoch, validClasses, void=-1, args=None):
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
loss_running = AverageMeter('Loss', ':.4e')
Expand Down Expand Up @@ -45,13 +45,13 @@ def train_epoch(dataloader, model, criterion, optimizer, lr_scheduler, epoch, vo
for i in range(inputs.size()[0]):
rand_idx = np.random.randint(inputs.size()[0])
# wall(3) --> sidewalk(1)
copyblob(src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=3, dst_class=1)
copyblob(validClasses, src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=3, dst_class=1)
# fence(4) --> sidewalk(1)
copyblob(src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=4, dst_class=1)
copyblob(validClasses, src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=4, dst_class=1)
# bus(15) --> road(0)
copyblob(src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=15, dst_class=0)
copyblob(validClasses, src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=15, dst_class=0)
# train(16) --> road(0)
copyblob(src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=16, dst_class=0)
copyblob(validClasses, src_img=inputs[i], src_mask=labels[i], dst_img=inputs[rand_idx], dst_mask=labels[rand_idx], src_class=16, dst_class=0)

inputs = inputs.float().cuda()
labels = labels.long().cuda()
Expand Down
8 changes: 3 additions & 5 deletions learning/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@
import torch.nn.functional as F
from torch.autograd import Variable

from learning.minicity import MiniCity

# Getter function for dataloaders
def get_dataloader(dataset, args):
#args = args
Expand Down Expand Up @@ -132,9 +130,9 @@ def rand_bbox(size, lam):
return bbx1, bby1, bbx2, bby2


def copyblob(src_img, src_mask, dst_img, dst_mask, src_class, dst_class):
mask_hist_src, _ = np.histogram(src_mask.numpy().ravel(), len(MiniCity.validClasses)-1, [0, len(MiniCity.validClasses)-1])
mask_hist_dst, _ = np.histogram(dst_mask.numpy().ravel(), len(MiniCity.validClasses)-1, [0, len(MiniCity.validClasses)-1])
def copyblob( validClasses, src_img, src_mask, dst_img, dst_mask, src_class, dst_class):
mask_hist_src, _ = np.histogram(src_mask.numpy().ravel(), len(validClasses)-1, [0, len(validClasses)-1])
mask_hist_dst, _ = np.histogram(dst_mask.numpy().ravel(), len(validClasses)-1, [0, len(validClasses)-1])

if mask_hist_src[src_class] != 0 and mask_hist_dst[dst_class] != 0:
""" copy src blob and paste to any dst blob"""
Expand Down
4 changes: 2 additions & 2 deletions train.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@
from helpers.ARGS import ARGS
from helpers.helpers import plot_learning_curves
from learning.learner import train_epoch, validate_epoch
from learning.tunnel import Tunnel
from learning.utils import get_dataloader
from datasets.tunnel import Tunnel

def main():
# Get tunnel dataset
Expand Down Expand Up @@ -50,7 +50,7 @@ def main():
# Training loop
for epoch in range(args.epochs):
# Train the model for an epoch
train_loss, train_acc = train_epoch(dataloaders["train"], model, loss_fn, optimizer, scheduler, epoch, args=args)
train_loss, train_acc = train_epoch(dataloaders["train"], model, loss_fn, optimizer, scheduler, epoch, Dataset.validClasses, args=args)

# Save trains metrics
metrics['train_loss'].append(train_loss)
Expand Down