utilities.py
import numpy as np
import tensorflow as tf
from collections import OrderedDict


def mat_argmax(m_A):
    """Return a (row, col) tuple with the indices of the maximum entry of matrix m_A."""
    assert m_A.ndim == 2
    num_cols = m_A.shape[1]
    ind = np.argmax(m_A)
    row = ind // num_cols
    col = ind % num_cols
    return (row, col)


def mat_argmin(m_A):
    """Return a (row, col) tuple with the indices of the minimum entry of matrix m_A."""
    assert m_A.ndim == 2
    num_cols = m_A.shape[1]
    ind = np.argmin(m_A)
    row = ind // num_cols
    col = ind % num_cols
    return (row, col)
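
# Illustrative, commented-out usage of mat_argmax / mat_argmin (the matrix
# below is just an example, not part of the module):
# m = np.array([[1, 5, 2],
#               [7, 0, 3]])
# mat_argmax(m)  # -> (1, 0), since m[1, 0] == 7 is the largest entry
# mat_argmin(m)  # -> (1, 1), since m[1, 1] == 0 is the smallest entry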


def print_time(start_time, end_time):
    """Print the elapsed time between two datetime objects in a human-readable form."""
    td = end_time - start_time
    hours = td.seconds // 3600
    remainder = td.seconds % 3600
    minutes = remainder // 60
    seconds = (td.seconds - hours * 3600 -
               minutes * 60) + td.microseconds / 1e6
    time_str = ""
    if td.days:
        time_str = "%d days, " % td.days
    if hours:
        time_str = time_str + "%d hours, " % hours
    if minutes:
        time_str = time_str + "%d minutes, " % minutes
    if time_str:
        time_str = time_str + "and "
    time_str = time_str + "%.3f seconds" % seconds
    print("Elapsed time = ", time_str)


def empty_array(shape):
    # np.full with fill_value=None and dtype=float produces an array of NaNs
    return np.full(shape, fill_value=None, dtype=float)


def project_to_interval(x, a, b):
    """Return x clipped to the interval [a, b]."""
    assert a <= b
    return np.max([np.min([x, b]), a])


def watt_to_dbm(array):
    assert (array > 0).all()
    return 10 * np.log10(array) + 30


def dbm_to_watt(array):
    return 10**((array - 30) / 10)


def natural_to_dB(array):  # `array` contains power gains in natural (linear) units
    return 10 * np.log10(array)


def dB_to_natural(array):  # `array` contains power gains in dB
    return 10**(array / 10)
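
# Quick illustrative round-trip check of the power conversions (commented
# out; the values are only an example):
# p_watt = np.array([0.001, 1.0])
# p_dbm = watt_to_dbm(p_watt)                 # -> [0., 30.]
# assert np.allclose(dbm_to_watt(p_dbm), p_watt)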


def save_l_var_vals(l_vars):
    """Return a list of tensors with the current values of the variables in l_vars."""
    l_vals = []
    for var in l_vars:
        l_vals.append(tf.convert_to_tensor(var))
    return l_vals


def restore_l_var_vals(l_vars, l_vals):
    """Assign the value l_vals[i] to l_vars[i]."""
    assert len(l_vars) == len(l_vals)
    for var, val in zip(l_vars, l_vals):
        var.assign(val)
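
# Illustrative, commented-out use of save_l_var_vals / restore_l_var_vals,
# e.g. to snapshot variable values before a tentative update (the variables
# below are only an example):
# l_vars = [tf.Variable([1.0, 2.0]), tf.Variable(3.0)]
# l_backup = save_l_var_vals(l_vars)
# l_vars[0].assign([0.0, 0.0])           # modify the variables
# restore_l_var_vals(l_vars, l_backup)   # variables recover their saved values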


class FifoUniqueQueue():
    """FIFO queue that does not push a new element if it is already in the
    queue. Pushing an element that is already in the queue does not change
    the order of the queue.

    It seems possible to implement this alternatively as a simple list.
    """

    def __init__(self):
        self._dict = OrderedDict()

    def put(self, key):
        self._dict[key] = 0  # dummy value, for future usage

    def get(self):
        # Returns the oldest item
        return self._dict.popitem(last=False)[0]

    def empty(self):
        return len(self._dict) == 0
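
# Illustrative, commented-out usage of FifoUniqueQueue:
# q = FifoUniqueQueue()
# q.put("a"); q.put("b"); q.put("a")   # the second "a" is ignored
# q.get()    # -> "a" (oldest element)
# q.get()    # -> "b"
# q.empty()  # -> True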


def list_are_close(list1, list2):
    """
    Args:
        `list1` can be a tensor or a list of tensors
        `list2` can be a tensor or a list of tensors

    Returns:
        True if the entries of list1 and list2 are elementwise close.
    """
    assert type(list1) == type(list2)

    def compare(x, y):
        return tf.math.reduce_all(
            np.isclose(x, y, rtol=1e-6, atol=0, equal_nan=True))

    if type(list1) != list:
        return compare(list1, list2)
    else:
        return tf.reduce_all([compare(x, y) for x, y in zip(list1, list2)])

# Illustrative, commented-out check of list_are_close:
# a = np.random.random(size=(2, 3, 4, 1))
# b = np.random.random(size=(2, 3, 4, 1))
# l_1 = [a, b]
# l_2 = [a, a]
# e = list_are_close(l_1, l_2)
# print(e)