Commit

Created demo using package
jsbroks committed Mar 16, 2019
1 parent c813424 commit 30ec317
Showing 17 changed files with 1,055 additions and 206 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -103,3 +103,5 @@ ENV/

# mypy
.mypy_cache/

/models/*.h5
11 changes: 11 additions & 0 deletions Dockerfile
@@ -0,0 +1,11 @@
FROM python:3.6

WORKDIR /temp/
ADD requirements.txt /temp/
RUN pip install -r requirements.txt && \
    pip install pycocotools

WORKDIR /workspace/

EXPOSE 8888
CMD jupyter notebook --port=8888 --ip=0.0.0.0 --no-browser --allow-root
64 changes: 0 additions & 64 deletions demo.py

This file was deleted.

4 changes: 4 additions & 0 deletions dextr/__init__.py
@@ -0,0 +1,4 @@
from .helpers import *
from .resnet import *
from .classifiers import *
from .model import *
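
With these wildcard imports the new dextr package re-exports the public names of its submodules, so callers can import everything from the package root. A minimal sketch of the resulting import surface (assuming none of the submodules define a restrictive __all__):

# Hypothetical usage sketch: the names come from dextr/model.py and dextr/helpers.py
from dextr import DEXTR, crop2fullmask, tens2image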
7 changes: 4 additions & 3 deletions dextr/classifiers.py
@@ -7,10 +7,11 @@
from keras.layers import Conv2D
from keras.layers import Layer

from networks import resnet
import keras.backend as K
from keras.backend import tf as ktf

from .helpers import BN


class Upsampling(Layer):

@@ -58,7 +59,7 @@ def psp_block(prev_layer, level, feature_map_shape, input_shape):
    strides = (kernel_strides_map[level][1], kernel_strides_map[level][1])
    prev_layer = AveragePooling2D(kernel, strides=strides)(prev_layer)
    prev_layer = Conv2D(512, (1, 1), strides=(1, 1), name=names[0], use_bias=False)(prev_layer)
    prev_layer = resnet.BN(bn_axis, name=names[1])(prev_layer)
    prev_layer = BN(bn_axis, name=names[1])(prev_layer)
    prev_layer = Activation('relu')(prev_layer)
    prev_layer = Upsampling(feature_map_shape)(prev_layer)
    return prev_layer
@@ -87,7 +88,7 @@ def build_pyramid_pooling_module(res, input_shape, nb_classes, sigmoid=False, ou
                         interp_block6,
                         res])
    x = Conv2D(512, (1, 1), strides=(1, 1), padding="same", name="class_psp_reduce_conv", use_bias=False)(res)
    x = resnet.BN(bn_axis, name="class_psp_reduce_bn")(x)
    x = BN(bn_axis, name="class_psp_reduce_bn")(x)
    x = Activation('relu')(x)

    x = Conv2D(nb_classes, (1, 1), strides=(1, 1), name="class_psp_final_conv")(x)
123 changes: 0 additions & 123 deletions dextr/dextr.py

This file was deleted.

15 changes: 10 additions & 5 deletions helpers/helpers.py → dextr/helpers.py
@@ -4,23 +4,28 @@
import random
import numpy as np

from keras.layers import BatchNormalization


def BN(axis, name=""):
    return BatchNormalization(axis=axis, momentum=0.1, name=name, epsilon=1e-5)


def tens2image(im):
    if im.size()[0] == 1:
        tmp = np.squeeze(im.numpy(), axis=0)
    else:
        tmp = im.numpy()
    if tmp.ndim == 2:
        return tmp
    else:
        return tmp.transpose((1, 2, 0))

    return tmp if tmp.ndim == 2 else tmp.transpose((1, 2, 0))


def crop2fullmask(crop_mask, bbox, im=None, im_size=None, zero_pad=False, relax=0, mask_relax=True,
                  interpolation=cv2.INTER_CUBIC, scikit=False):
                  interpolation=cv2.INTER_CUBIC, scikit=True):
    if scikit:
        from skimage.transform import resize as sk_resize
    assert(not(im is None and im_size is None)), 'You have to provide an image or the image size'

    if im is None:
        im_si = im_size
    else:
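
For orientation, a small sketch of how the relocated helpers behave; the PyTorch import is an assumption for illustration only, since tens2image just expects an object with .size() and .numpy() methods:

import torch  # assumed for illustration only

from dextr.helpers import BN, tens2image

t = torch.rand(1, 3, 64, 64)           # one CHW image with a batch dimension
img = tens2image(t)                    # drops the batch dim and moves channels last
print(img.shape)                       # (64, 64, 3)

bn = BN(axis=-1, name="example_bn")    # BatchNormalization with momentum 0.1, epsilon 1e-5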
58 changes: 58 additions & 0 deletions dextr/model.py
@@ -0,0 +1,58 @@
#!/usr/bin/env python
from os.path import splitext, join
import numpy as np
from scipy import misc
from keras import backend as K

import tensorflow as tf
from .resnet import build_network
from .helpers import *


class DEXTR(object):
    """Pyramid Scene Parsing Network by Hengshuang Zhao et al 2017"""

    def __init__(self, nb_classes, resnet_layers, input_shape, num_input_channels=4,
                 classifier='psp', weights_path='models/dextr_pascal-sbd.h5', sigmoid=False):
        self.input_shape = input_shape
        self.num_input_channels = num_input_channels
        self.sigmoid = sigmoid
        self.model = build_network(nb_classes=nb_classes, resnet_layers=resnet_layers, num_input_channels=num_input_channels,
                                   input_shape=self.input_shape, classifier=classifier, sigmoid=self.sigmoid, output_size=self.input_shape)

        self.model.load_weights(weights_path)

    def feed_forward(self, data):

        assert data.shape == (self.input_shape[0], self.input_shape[1], self.num_input_channels)
        prediction = self.model.predict(np.expand_dims(data, 0))[0]

        return prediction

    def predict_mask(self, image, points, pad=50, threshold=0.8, zero_pad=True):
        points = np.array(points).astype(np.int)
        image = np.array(image)
        bbox = get_bbox(image, points=points, pad=pad, zero_pad=zero_pad)
        crop_image = crop_from_bbox(image, bbox, zero_pad=zero_pad)
        resize_image = fixed_resize(crop_image, (512, 512)).astype(np.float32)

        # Generate extreme point heat map normalized to image values
        extreme_points = points - [np.min(points[:, 0]), np.min(points[:, 1])] + [pad, pad]
        extreme_points = (512 * extreme_points * [1 / crop_image.shape[1], 1 / crop_image.shape[0]]).astype(np.int)
        extreme_heatmap = make_gt(resize_image, extreme_points, sigma=10)
        extreme_heatmap = cstm_normalize(extreme_heatmap, 255)

        # Concatenate inputs and convert to tensor
        input_dextr = np.concatenate((resize_image, extreme_heatmap[:, :, np.newaxis]), axis=2)

        pred = self.model.predict(input_dextr[np.newaxis, ...])[0, :, :, 0]
        result = crop2fullmask(pred, bbox, im_size=image.shape[:2], zero_pad=zero_pad, relax=pad) > threshold

        return result

    def predict(self, img):
        # Preprocess
        img = misc.imresize(img, self.input_shape)
        img = img.astype('float32')
        probs = self.feed_forward(img)
        return probs
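
The DEXTR class is the core of the new package: the constructor builds the network and loads pretrained weights, and predict_mask turns a set of user-clicked extreme points into a full-resolution boolean mask. A usage sketch follows; the constructor arguments nb_classes=1 and resnet_layers=101, the image path, and the click coordinates are assumptions for illustration, while the weight path matches the default in __init__ above:

import numpy as np
from PIL import Image

from dextr.model import DEXTR

# Assumed configuration: single foreground class, ResNet-101 backbone, 512x512 input,
# weights mounted at models/dextr_pascal-sbd.h5 (see docker-compose.yml below).
net = DEXTR(nb_classes=1, resnet_layers=101, input_shape=(512, 512),
            weights_path='models/dextr_pascal-sbd.h5', sigmoid=True)

image = np.array(Image.open('imgs/example.jpg'))            # hypothetical example image
points = [(120, 210), (430, 260), (250, 80), (265, 400)]    # four extreme points as (x, y) clicks

mask = net.predict_mask(image, points)    # boolean mask with the image's height and width
print(mask.shape, mask.dtype)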
8 changes: 2 additions & 6 deletions dextr/resnet.py
@@ -4,16 +4,12 @@
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import ZeroPadding2D
from keras.layers import BatchNormalization
from keras.models import Model

import keras.backend as K

from networks.classifiers import build_pyramid_pooling_module


def BN(axis, name=""):
    return BatchNormalization(axis=axis, momentum=0.1, name=name, epsilon=1e-5)
from .classifiers import build_pyramid_pooling_module
from .helpers import BN


def identity_block(input_tensor, kernel_size, filters, stage, block, dilation=1):
12 changes: 12 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,12 @@
version: "3"

services:
  example:
    ports:
      - "8888:8888"
    volumes:
      - ./models:/workspace/models
      - ./examples:/workspace/notebooks
      - ./dextr:/workspace/libs/dextr
      - ./imgs:/workspace/imgs
    build: .
