helpers.py
import numpy as np
import tensorflow as tf
###############################################################################
def flatten(x, name=None, reuse=None):
    """Flatten a Tensor to 2 dimensions.

    Parameters
    ----------
    x : tf.Tensor
        Input tensor to flatten.
    name : str, optional
        Variable scope for the flatten operations.
    reuse : bool, optional
        Whether to reuse the variable scope.

    Returns
    -------
    flattened : tf.Tensor
        Flattened tensor.
    """
    with tf.variable_scope(name or 'flatten', reuse=reuse):
        dims = x.get_shape().as_list()
        if len(dims) == 4:
            # Collapse the height, width and channel dimensions into one.
            flattened = tf.reshape(
                x, shape=[-1, dims[1] * dims[2] * dims[3]])
        elif len(dims) in (1, 2):
            # Already flat enough; return unchanged.
            flattened = x
        else:
            raise ValueError(
                'Expected a tensor of 1, 2 or 4 dimensions. Found: %d'
                % len(dims))
        return flattened
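###############################################################################
# A minimal usage sketch for `flatten`, assuming TensorFlow 1.x graph mode.
# The placeholder shape below (28 x 28 x 3 images) is an illustrative
# assumption, not part of the original helpers.
def _flatten_example():
    x4d = tf.placeholder(tf.float32, shape=[None, 28, 28, 3], name='x4d')
    flat = flatten(x4d)
    # The three trailing dimensions collapse into 28 * 28 * 3 = 2352.
    print(flat.get_shape().as_list())  # [None, 2352]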
###############################################################################
def linear(X, n_outputs, scope=None, activation=None, reuse=None):
    """Fully connected layer: X @ weights + biases, with optional activation."""
    # Flatten any non-2-D input before the matrix multiplication.
    if len(X.get_shape().as_list()) != 2:
        X = flatten(X)
    n_inputs = X.get_shape().as_list()[1]
    with tf.variable_scope(scope or "fully_connected", reuse=reuse):
        weights = tf.get_variable(
            name="weights",
            dtype=tf.float32,
            shape=[n_inputs, n_outputs],
            initializer=tf.random_normal_initializer(stddev=0.1))
        biases = tf.get_variable(
            name="biases",
            dtype=tf.float32,
            shape=[n_outputs],
            initializer=tf.constant_initializer(value=0))
        hidden = tf.matmul(X, weights) + biases
        if activation:
            hidden = activation(hidden)
        return hidden
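###############################################################################
# A minimal usage sketch for `linear`, assuming TensorFlow 1.x graph mode.
# The input width, layer size and scope name are illustrative assumptions.
def _linear_example():
    x = tf.placeholder(tf.float32, shape=[None, 64], name='x')
    h = linear(x, n_outputs=32, scope='layer1', activation=tf.nn.relu)
    # Passing reuse=True with the same scope shares weights and biases.
    h_shared = linear(x, n_outputs=32, scope='layer1',
                      activation=tf.nn.relu, reuse=True)
    print(h.get_shape().as_list())  # [None, 32]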
###############################################################################
def im_split(img):
    """Split an image into per-pixel positions and colours."""
    xs = []  # to store positions (row, col)
    ys = []  # to store colours (R, G, B)
    for row_i in range(img.shape[0]):
        for col_i in range(img.shape[1]):
            xs.append([row_i, col_i])
            ys.append(img[row_i, col_i])
    xs = np.array(xs)
    ys = np.array(ys)
    return xs, ys
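###############################################################################
# A minimal usage sketch for `im_split` on a small random image. The image
# size (4 x 4 x 3) is an illustrative assumption, not part of the original.
def _im_split_example():
    img = np.random.rand(4, 4, 3)
    xs, ys = im_split(img)
    print(xs.shape)  # (16, 2) -- one (row, col) pair per pixel
    print(ys.shape)  # (16, 3) -- one RGB colour per pixel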