Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Video Similarity Search: Upload Object Detection #598

Merged
merged 10 commits into from
Aug 17, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
FROM tensorflow/tensorflow:2.5.0

WORKDIR /app/src
COPY . /app

# Enable XLA JIT compilation on CPU for the TF model (key=value ENV form).
ENV TF_XLA_FLAGS=--tf_xla_cpu_global_jit

# Pre-seed the Keras model cache so ResNet50 weights are not downloaded at runtime.
RUN mkdir -p /root/.keras/models && \
    mv /app/data/models/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5 /root/.keras/models/

# OpenCV runtime libraries. Single layer with the apt cache removed keeps the image small.
RUN apt-get update && \
    apt-get install -y --no-install-recommends libsm6 libxext6 libxrender-dev libgl1-mesa-glx && \
    rm -rf /var/lib/apt/lists/*

# NOTE(review): requirements.txt pins tensorflow==2.6.0 while the base image is
# 2.5.0 — confirm which TF version is actually intended before shipping.
RUN pip3 install -r /app/requirements.txt

# Exec form so the server process receives signals (SIGTERM) directly.
CMD ["python3", "main.py"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Python dependencies for the video object-detection similarity-search server.
# NOTE(review): the Dockerfile builds FROM tensorflow/tensorflow:2.5.0 while
# tensorflow is pinned to 2.6.0 here — confirm the intended TF version.
Keras==2.6.0
tensorflow==2.6.0
# Frame extraction and image I/O.
opencv-python
Pillow==8.2.0
# MySQL client for the metadata store.
pymysql
# Web API server.
fastapi
uvicorn
pandas
aiofiles
# Milvus vector-database client (2.0 release candidate).
pymilvus-orm==2.0.0rc2
# Local on-disk embedding cache.
diskcache
# Required by FastAPI for multipart file uploads.
python-multipart
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"""Central configuration; every setting can be overridden via environment variables."""
import os

############### Milvus Configuration ###############
# int() casts: os.getenv returns a *string* whenever the variable is set in the
# environment, while the fallbacks here are ints. Without the cast the numeric
# settings silently change type depending on deployment.
MILVUS_HOST = os.getenv("MILVUS_HOST", "localhost")
MILVUS_PORT = int(os.getenv("MILVUS_PORT", 19530))
VECTOR_DIMENSION = int(os.getenv("VECTOR_DIMENSION", 2048))   # ResNet50 (pooled) feature size
INDEX_FILE_SIZE = int(os.getenv("INDEX_FILE_SIZE", 1024))
METRIC_TYPE = os.getenv("METRIC_TYPE", "L2")
DEFAULT_TABLE = os.getenv("DEFAULT_TABLE", "video_obj_det")
TOP_K = int(os.getenv("TOP_K", 10))

############### Number of log files ###############
LOGS_NUM = int(os.getenv("logs_num", 0))

############### MySQL Configuration ###############
MYSQL_HOST = os.getenv("MYSQL_HOST", "localhost")
MYSQL_PORT = int(os.getenv("MYSQL_PORT", 3306))
MYSQL_USER = os.getenv("MYSQL_USER", "root")
MYSQL_PWD = os.getenv("MYSQL_PWD", "123456")
MYSQL_DB = os.getenv("MYSQL_DB", "mysql")

############### Model PATH Configuration ###############
# NOTE(review): both settings read the SAME env var "OBJECT_PATH" — setting it
# would point the model directory and the yaml config at one value. Looks like
# a copy-paste slip; confirm the intended variable name for the config path.
COCO_MODEL_PATH = os.getenv("OBJECT_PATH", "./yolov3_detector/data/yolov3_darknet")
YOLO_CONFIG_PATH = os.getenv("OBJECT_PATH", "./yolov3_detector/data/yolov3_darknet/yolo.yml")

CACHE_DIR = "./tmp"
DATA_PATH = os.getenv("DATA_PATH", "data/example_object")
UPLOAD_PATH = os.getenv("UPLOAD_PATH", "data/example_video")
# No distance threshold on search results by default.
DISTANCE_LIMIT = None
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import numpy as np
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.applications.resnet50 import preprocess_input as preprocess_input_resnet50
from tensorflow.keras.preprocessing import image
from numpy import linalg as LA


# os.environ['KERAS_HOME'] = os.path.abspath(os.path.join('.', 'data'))


class CustomOperator:
    """Extract L2-normalized ResNet50 feature vectors from images.

    include_top=False with max pooling yields a single 2048-d embedding
    per image, matching VECTOR_DIMENSION in the Milvus collection.
    """

    def __init__(self):
        self.input_shape = (224, 224, 3)
        # FIX: self.weight / self.input_shape were assigned but the constructor
        # call and warm-up pass hard-coded 'imagenet' and 224 — keep them in sync.
        self.weight = 'imagenet'
        self.pooling = 'max'
        self.model_resnet50 = ResNet50(weights=self.weight,
                                       input_shape=self.input_shape,
                                       pooling=self.pooling, include_top=False)
        # Warm-up inference so the first real request does not pay graph-build cost.
        self.model_resnet50.predict(np.zeros((1,) + self.input_shape))

    def execute(self, img_path):
        """Return the embedding of the image at `img_path` as a list of floats."""
        img = image.load_img(img_path, target_size=(self.input_shape[0], self.input_shape[1]))
        img = image.img_to_array(img)
        img = np.expand_dims(img, axis=0)
        img = preprocess_input_resnet50(img)
        feat = self.model_resnet50.predict(img)
        # L2-normalize so an L2 metric search behaves like cosine similarity.
        norm_feat = feat[0] / LA.norm(feat[0])
        return norm_feat.tolist()

'''
if __name__ == '__main__':
model = CustomOperator()
x = model.execute('data/example_object/water.jpg')
print(x)
'''
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import cv2
import uuid
import os
from config import UPLOAD_PATH


class FrameExtract:
    """Sample frames from a video file at roughly `fps` frames per second."""

    def __init__(self, fps=1):
        # Target number of extracted frames per second of video.
        self.fps = fps

    def extract_frame(self, file_path):
        """Extract frames from `file_path` into a unique folder under UPLOAD_PATH.

        Returns (folder_path, image_paths); frames are named 000001.jpg,
        000002.jpg, ... in playback order.
        """
        # One unique output folder per upload: <video-name>-<uuid1>.
        prefix = file_path.split("/")[-1].split(".")[0] + "-" + str(uuid.uuid1())
        cap = cv2.VideoCapture(file_path)
        framerate = cap.get(cv2.CAP_PROP_FPS)
        # FIX: the original `count % (int(framerate)/self.fps)` raises
        # ZeroDivisionError when OpenCV reports fps<=0 (corrupt/odd container)
        # or when framerate < self.fps. Clamp the sampling step to >= 1.
        step = max(int(framerate) // self.fps, 1) if framerate > 0 else 1
        object_path = "%s/%s/" % (UPLOAD_PATH, prefix)
        # makedirs: also creates UPLOAD_PATH itself if it does not exist yet.
        if not os.path.exists(object_path):
            os.makedirs(object_path)
        images = []
        count, frame_count = 0, 0
        success, image = cap.read()
        while success:
            if count % step == 0:
                file_name = object_path + '%06d' % (frame_count + 1) + '.jpg'
                cv2.imwrite(file_name, image)
                frame_count += 1
                images.append(file_name)
            success, image = cap.read()
            count += 1
        cap.release()
        return object_path, images

'''
if __name__ == '__main__':
avi = "/data/example_video/test.avi"
fe = FrameExtract()
object_path, images = fe.extract_frame(avi)
print(object_path)
print("images:", images)
'''
122 changes: 122 additions & 0 deletions solutions/video_similarity_search/object_detection/server/src/logs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import os
import re
import datetime
import logging
import sys
from config import LOGS_NUM

try:
import codecs
except ImportError:
codecs = None


class MultiprocessHandler(logging.FileHandler):
    """File handler that rolls to a new date-stamped file per time interval.

    Each emit() re-derives the target filename from the current time, so
    several processes writing with the same prefix converge on the same file.
    """

    def __init__(self, filename, when='D', backupCount=0, encoding=None, delay=False):
        """
        filename    -- log-file prefix; final name is logs/<prefix>-<date>.log
        when        -- rotation granularity: S, M, H or D (case-insensitive)
        backupCount -- rotated files to keep; 0 keeps everything
        """
        self.prefix = filename
        self.backupCount = backupCount
        self.when = when.upper()
        # Matches the date suffix of rotated files in getFilesToDelete().
        self.extMath = r"^\d{4}-\d{2}-\d{2}"

        self.when_dict = {
            'S': "%Y-%m-%d-%H-%M-%S",
            'M': "%Y-%m-%d-%H-%M",
            'H': "%Y-%m-%d-%H",
            'D': "%Y-%m-%d"
        }

        # BUG FIX: look up the normalized `self.when`, not the raw `when`
        # argument, so a lower-case unit ('d') is accepted as the
        # `when.upper()` above clearly intends.
        self.suffix = self.when_dict.get(self.when)
        if not self.suffix:
            print('The specified date interval unit is invalid: ', self.when)
            sys.exit(1)

        # strftime template for the full log path, e.g. ./logs/milvus-%Y-%m-%d.log
        self.filefmt = os.path.join('.', "logs", "%s-%s.log" % (self.prefix, self.suffix))

        self.filePath = datetime.datetime.now().strftime(self.filefmt)

        _dir = os.path.dirname(self.filefmt)
        try:
            if not os.path.exists(_dir):
                os.makedirs(_dir)
        except Exception as e:
            print('Failed to create log file: ', e)
            print("log_path:" + self.filePath)
            sys.exit(1)

        if codecs is None:
            encoding = None

        logging.FileHandler.__init__(self, self.filePath, 'a+', encoding, delay)

    def shouldChangeFileToWrite(self):
        """Return True (and remember the new path) when the target filename changed."""
        _filePath = datetime.datetime.now().strftime(self.filefmt)
        if _filePath != self.filePath:
            self.filePath = _filePath
            return True
        return False

    def doChangeFile(self):
        """Re-point the handler at the current file and prune old backups."""
        self.baseFilename = os.path.abspath(self.filePath)
        if self.stream:
            self.stream.close()
            self.stream = None

        if not self.delay:
            self.stream = self._open()
        if self.backupCount > 0:
            for s in self.getFilesToDelete():
                os.remove(s)

    def getFilesToDelete(self):
        """Return the oldest rotated files beyond backupCount (lexicographic = chronological)."""
        dir_name, _ = os.path.split(self.baseFilename)
        file_names = os.listdir(dir_name)
        result = []
        prefix = self.prefix + '-'
        date_re = re.compile(self.extMath)  # compile once, not once per file
        for file_name in file_names:
            if file_name[:len(prefix)] == prefix:
                suffix = file_name[len(prefix):-4]  # strip trailing ".log"
                if date_re.match(suffix):
                    result.append(os.path.join(dir_name, file_name))
        result.sort()

        if len(result) < self.backupCount:
            # Fewer files than the quota: nothing to delete (a negative slice
            # below would wrongly delete the newest files).
            result = []
        else:
            result = result[:len(result) - self.backupCount]
        return result

    def emit(self, record):
        """Write the record, rolling the file first if the interval changed."""
        try:
            if self.shouldChangeFileToWrite():
                self.doChangeFile()
            logging.FileHandler.emit(self, record)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:  # never let logging crash the application
            self.handleError(record)


def write_log():
    """Configure the root logger: INFO to stdout, DEBUG to a daily rolling file.

    Idempotent: calling it again (e.g. when the module is re-imported or
    reloaded) does not stack duplicate handlers — previously every call
    added a fresh pair, so each line was logged multiple times.
    """
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    if any(isinstance(h, MultiprocessHandler) for h in logger.handlers):
        return logger  # already configured by a previous call

    fmt = logging.Formatter(
        '%(asctime)s | %(levelname)s | %(filename)s | %(funcName)s | %(lineno)s | %(message)s')

    # Console: INFO and above.
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(fmt)

    # File: everything, rolled daily, keeping LOGS_NUM backups.
    log_name = "milvus"
    file_handler = MultiprocessHandler(log_name, when='D', backupCount=LOGS_NUM)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(fmt)
    file_handler.doChangeFile()

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler)

    return logger


# Module-level singleton used throughout the server.
LOGGER = write_log()
Loading