Merge pull request #5 from QVPR/v1.1.0-alpha
V1.0.0 alpha to v1.0.0 full main merge
AdamDHines authored Sep 13, 2023
2 parents 44ccd2c + c44104c commit 297846c
Showing 10 changed files with 1,146 additions and 620 deletions.
Binary file modified .DS_Store
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
weights/
output/
__pycache__/
src/__pycache__/
931 changes: 393 additions & 538 deletions VPRTempo.py

Large diffs are not rendered by default.

Binary file added assets/github_image.png
Binary file removed output/.DS_Store
8 changes: 1 addition & 7 deletions src/blitnet.py
@@ -119,8 +119,6 @@ def addLayer(net,dims,thr_range,fire_rate,ip_rate,const_inp,nois,rec_spks):
    net['spikes'].append(torch.empty([],dtype=torch.float64))
    net['rec_spks'].append(rec_spks)

    return len(net['x'])-1

##################################
# Add a set of random connections between layers
# net: BITnet instance
@@ -196,8 +194,6 @@ def addWeights(net,layer_pre,layer_post,W_range,p,stdp_rate):
        nrmInh[nrmInh==0.0] = 1.0
        net['W'][excIndex][n] = net['W'][excIndex][n,:,:]/nrmExc
        net['W'][inhIndex][n] = net['W'][inhIndex][n,:,:]/nrmInh

    return len(net['W'])-1

##################################
# Normalise all the firing rates
@@ -312,9 +308,7 @@ def calc_spikes(net,layersInfo):
    for i,eta in enumerate(net['eta_ip']):

        if net['rec_spks'][i]:
            outspk = (net['x'][i][0,:,:]).detach().cpu().numpy() # detach to numpy for easy plotting
            if i == 2:
                outspk[outspk<0.05] = 0
            outspk = (net['x'][i][0,0,:]).detach().cpu().numpy() # detach to numpy for easy plotting
            n_idx = np.nonzero(outspk)
            net['spikes'][i].extend([net['step_num']+net['x'][i][0,:,:].detach().cpu().numpy(),n]
                                     for n in n_idx)
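The calc_spikes change above records spikes from a single row of the state tensor (net['x'][i][0,0,:]) rather than the full slice, and drops the layer-2 threshold. As a standalone illustration of the detach-to-numpy pattern used there (toy shapes assumed, not code from the repository):

import numpy as np
import torch

# toy spike tensor shaped [1, 1, n_neurons], mimicking net['x'][i]
x = torch.tensor([[[0.0, 0.3, 0.0, 0.9]]])

# detach to numpy for easy plotting, as in calc_spikes
outspk = x[0, 0, :].detach().cpu().numpy()

# indices of the neurons with a non-zero output at this step
n_idx = np.nonzero(outspk)[0]
print(n_idx)  # [1 3]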
10 changes: 5 additions & 5 deletions src/metrics.py
@@ -53,14 +53,14 @@ def createPR(S_in, GThard, GTsoft=None, matching='multi', n_thresh=100):

    # single-best-match or multi-match VPR
    if matching == 'single':
        # count the number of ground-truth positives (GTP)
        GTP = np.count_nonzero(GT.any(0))

        # GT-values for best match per query (i.e., per column)
        GT = GT[np.argmax(S, axis=0), np.arange(GT.shape[1])]
        GT = GT[np.nanargmax(S, axis=1), np.arange(GT.shape[1])]

        # count the number of ground-truth positives (GTP)
        GTP = np.count_nonzero(GT)

        # similarities for best match per query (i.e., per column)
        S = np.max(S, axis=0)
        S = np.nanmax(S, axis=0)

    elif matching == 'multi':
        # count the number of ground-truth positives (GTP)
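The createPR change above swaps argmax/max for their NaN-aware counterparts when picking the single best match per query, and counts ground-truth positives after that selection. Why the NaN-aware variants matter is easy to see in isolation (a standalone numpy sketch, not code from the repository):

import numpy as np

# toy similarity matrix with a NaN entry (e.g. a masked or invalid comparison)
S = np.array([[0.2, np.nan],
              [0.9, 0.4]])

print(np.argmax(S[:, 1]))     # 0 -- plain argmax returns the index of the NaN
print(np.nanargmax(S[:, 1]))  # 1 -- the NaN is ignored
print(np.max(S))              # nan -- NaN propagates through max
print(np.nanmax(S))           # 0.9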
112 changes: 112 additions & 0 deletions src/nordland.py
@@ -0,0 +1,112 @@
'''
Imports
'''
import os
import re
import shutil
import zipfile

from os import walk

# load and sort the file names in order, not by how OS indexes them
def atoi(text):
    return int(text) if text.isdigit() else text

def natural_keys(text):
    return [ atoi(c) for c in re.split(r'(\d+)', text) ]

# set the base path to the location of the downloaded Nordland datasets
basePath = ''
assert(os.path.isdir(basePath)),"Please set the basePath to the location of the downloaded Nordland datasets"

# define the subfolders of the Nordland datasets
subPath = ["spring_images_train/section1/","spring_images_train/section2/",
           "fall_images_train/section1/","fall_images_train/section2/",
           "winter_images_train/section1/","winter_images_train/section2/",
           "summer_images_train/section1/","summer_images_train/section2/"]

# set the desired output folder for unzipping and organization
outDir = ''
assert(os.path.isdir(outDir)),"Please set the outDir to the desired output location for unzipping the Nordland datasets"

# define output paths for the data
outPath = [os.path.join(outDir,"spring/"),os.path.join(outDir,"fall/"),
           os.path.join(outDir,"winter/"),os.path.join(outDir,"summer/")]

# check for existence of the zip folders, throw exception if missing
zipNames = ["spring_images_train.zip","fall_images_train.zip",
            "winter_images_train.zip","summer_images_train.zip"]
for n in zipNames:
    if not os.path.exists(basePath+n):
        raise Exception('Please ensure dataset .zip folders have been downloaded')

# check if nordland data folders have already been unzipped
zip_flag = []
for n, ndx in enumerate(range(0,len(subPath),2)):
    print('Unzipping '+zipNames[n])
    if os.path.exists(basePath+subPath[ndx]):
        # check if the folder contains any files
        file_lst = os.listdir(basePath+subPath[ndx])
        # remove folder if it is empty and unzip the data folder
        if len(file_lst) == 0:
            shutil.rmtree(basePath+subPath[ndx].replace('section1/',''))
            with zipfile.ZipFile(basePath+zipNames[n],"r") as zip_ref:
                zip_ref.extractall(basePath)
    else:
        with zipfile.ZipFile(basePath+zipNames[n],"r") as zip_ref:
            zip_ref.extractall(basePath)

# load image paths
tempPaths = []
imgPaths = []
for n in range(0,len(subPath)):
    tempPaths = []
    for (path, dir_names, file_names) in walk(basePath+subPath[n]):
        tempPaths.extend(file_names)
    # sort image names
    tempPaths.sort(key=natural_keys)
    tempPaths = [basePath+subPath[n]+s for s in tempPaths]
    imgPaths = imgPaths + tempPaths

# if output folders already exist, delete them
for n in outPath:
    if os.path.exists(n):
        shutil.rmtree(n)
        print('Removed pre-existing output folder')

# rename and move the training data to match the nordland_imageNames.txt file
for n in outPath:
    os.mkdir(n)
for n, filename in enumerate(imgPaths):
    base = os.path.basename(filename)
    split_base = os.path.splitext(base)
    if int(split_base[0]) < 10:
        my_dest = "images-0000"+split_base[0] + ".png"
    elif int(split_base[0]) < 100:
        my_dest = "images-000"+split_base[0] + ".png"
    elif int(split_base[0]) < 1000:
        my_dest = "images-00"+split_base[0] + ".png"
    elif int(split_base[0]) < 10000:
        my_dest = "images-0"+split_base[0] + ".png"
    else:
        my_dest = "images-"+split_base[0] + ".png"
    if "spring" in filename:
        out = outPath[0]
    elif "fall" in filename:
        out = outPath[1]
    elif "winter" in filename:
        out = outPath[2]
    else:
        out = outPath[-1]

    fileDest = out + my_dest
    os.rename(filename, fileDest)

# remove the empty folders
for n, ndx in enumerate(subPath):
    if n%2 == 0:
        shutil.rmtree(basePath+ndx.replace('section1/',''))
    else:
        continue

print('Finished unzipping and organizing Nordland dataset')
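The if/elif chain in nordland.py pads the numeric image name to five digits so the renamed files line up with the nordland_imageNames.txt listing. Assuming the basenames are plain integers without leading zeros, the same mapping can be sanity-checked with str.zfill (an illustrative sketch, not part of the commit):

def padded_name(stem):
    # pad the numeric stem to five digits, e.g. "7" -> "images-00007.png"
    return "images-" + stem.zfill(5) + ".png"

assert padded_name("7") == "images-00007.png"
assert padded_name("1234") == "images-01234.png"
assert padded_name("12345") == "images-12345.png"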
