layers.py

  1. """
  2. QUADRATIC layers
  3. Here we define linear and quadratic layers that share subunits
  4. """
  5. import tensorflow.compat.v1 as tf
  6. import numpy as np
  7. #tf.compat.v1.disable_eager_execution()


class Dense(object):
    """
    Dense linear unit.
    This layer implements an affine transformation followed by a pointwise
    nonlinearity. It also allows defining a self-normalized version in which
    each of the weight vectors that combine linearly with the input
    coefficients has unit norm.
    """

    def __init__(self, in_dim,
                 out_dim,
                 activation=None,
                 usebias=True,
                 weightnorm=False):
        """
        Inputs:
        -- in_dim : integer denoting the number of input dimensions
        -- out_dim : integer denoting the number of output dimensions
        -- activation : (default None) pointwise nonlinearity function
        -- usebias : (optional) use a bias in the affine transformation;
           the default value is True
        -- weightnorm : (optional) normalizes the columns of the weight
           matrix to have unit norm
        """
        self.in_dim = in_dim
        self.n_units = out_dim
        self.usebias = usebias
        # Initialize the parameters; the 1/sqrt(in_dim) scale keeps the
        # pre-activation variance near 1 for unit-variance inputs.
        W_shape = [self.n_units, self.in_dim]
        #W_std = 1.0 / np.sqrt(self.n_units*self.in_dim)
        W_std = 1.0 / np.sqrt(self.in_dim)
        self.W = tf.Variable(tf.random_normal(W_shape, stddev=W_std),
                             dtype=tf.float32)
        if self.usebias:
            self.bias = tf.Variable(tf.zeros((self.n_units, 1), dtype=tf.float32))
        self.activation = activation
        self.weightnorm = weightnorm

    def propagateForward(self, input):
        """
        Maps the input array to an output array with the sequence of
        operations defined in the layer.
        Inputs:
        -- input : a d-dimensional tensor where the first axis is the
           number of samples
        """
        if self.weightnorm:
            # Rescale each column of W to unit norm before applying it.
            W_norm = tf.divide(self.W, tf.norm(self.W, axis=0, keepdims=True))
            WX = tf.matmul(input, tf.transpose(W_norm))
        else:
            WX = tf.matmul(input, tf.transpose(self.W))
        if self.usebias:
            # bias has shape (n_units, 1); transpose it so it broadcasts
            # across the batch axis.
            lin_term = WX + tf.transpose(self.bias)
        else:
            lin_term = WX
        if self.activation is None:
            return lin_term
        else:
            return self.activation(lin_term)

    def __call__(self, input):
        return self.propagateForward(input)

    def get_trainable_params(self):
        """
        Returns a tuple with the parameters that can be tuned.
        This function is useful for managing the lists of parameters we want
        to tune or keep fixed during learning.
        """
        if self.usebias:
            return (self.W, self.bias)
        else:
            return (self.W, )
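

# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal example of driving this layer in TF1 graph mode, assuming eager
# execution is disabled. The dimensions, batch size, and session setup below
# are assumptions made for the example, not taken from the repository.
if __name__ == "__main__":
    tf.disable_eager_execution()
    # Map 3-dimensional inputs to 2 units with weight normalization.
    x = tf.placeholder(tf.float32, shape=[None, 3])
    layer = Dense(in_dim=3, out_dim=2, activation=tf.nn.relu, weightnorm=True)
    y = layer(x)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(y, feed_dict={x: np.random.randn(4, 3).astype(np.float32)})
        print(out.shape)  # (4, 2)
        # get_trainable_params() exposes the variables to pass, e.g., as an
        # optimizer's var_list when only some layers should be trained.
        print([p.shape for p in layer.get_trainable_params()])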