# test_mlp.py
  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. import numpy
  4. import pytest
  5. import torch
  6. __author__ = "Christian Heider Nielsen"
  7. __doc__ = ""
  8. from torch import nn
  9. from draugr.torch_utilities import to_tensor, MLP, constant_init
  10. def test_single_dim():
  11. pos_size = (4,)
  12. a_size = (1,)
  13. model = MLP(input_shape=pos_size, output_shape=a_size)
  14. pos_1 = to_tensor(
  15. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  16. )
  17. print(model(pos_1))
  18. def test_hidden_dim():
  19. pos_size = (4,)
  20. hidden_size = (2, 3)
  21. a_size = (2,)
  22. model = MLP(input_shape=pos_size, hidden_layers=hidden_size, output_shape=a_size)
  23. pos_1 = to_tensor(
  24. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  25. )
  26. print(model(pos_1))
@pytest.mark.skip
def test_multi_dim():
    """
    TODO: BROKEN!

    NOTE(review): feeds two flattened (64, prod(pos_size[1:])) tensors into
    an MLP declared with a 3-tuple input_shape; presumably the multi-input
    handling in MLP does not accept this combination — confirm against the
    MLP implementation before unskipping.
    """
    pos_size = (2, 3, 2)  # two, 2d tensors, expected flatten
    a_size = (2, 4, 5)
    model = MLP(input_shape=pos_size, output_shape=a_size)
    pos_1 = to_tensor(
        numpy.random.rand(64, numpy.prod(pos_size[1:])), device="cpu", dtype=torch.float
    )
    pos_2 = to_tensor(
        numpy.random.rand(64, numpy.prod(pos_size[1:])), device="cpu", dtype=torch.float
    )
    print(model(pos_1, pos_2))
  42. def test_single_dim2():
  43. """description"""
  44. pos_size = (4,)
  45. a_size = (1,)
  46. model = MLP(input_shape=pos_size, output_shape=a_size)
  47. pos_1 = to_tensor(
  48. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  49. )
  50. print(model(pos_1)[0].shape)
  51. def test_hidden_dim2():
  52. """description"""
  53. pos_size = (3,)
  54. hidden_size = list(range(6, 10))
  55. a_size = (4,)
  56. model = MLP(
  57. input_shape=pos_size,
  58. hidden_layers=hidden_size,
  59. output_shape=a_size,
  60. hidden_layer_activation=torch.nn.Tanh(),
  61. default_init=None,
  62. )
  63. model2 = nn.Sequential(
  64. *[
  65. nn.Linear(3, 6),
  66. nn.Tanh(),
  67. nn.Linear(6, 7),
  68. nn.Tanh(),
  69. nn.Linear(7, 8),
  70. nn.Tanh(),
  71. nn.Linear(8, 9),
  72. nn.Tanh(),
  73. nn.Linear(9, 4),
  74. ]
  75. )
  76. model3 = nn.Sequential(
  77. *[
  78. nn.Linear(3, 6),
  79. nn.Tanh(),
  80. nn.Linear(6, 7),
  81. nn.Tanh(),
  82. nn.Linear(7, 8),
  83. nn.Tanh(),
  84. nn.Linear(8, 9),
  85. nn.Tanh(),
  86. nn.Linear(9, 4),
  87. ]
  88. )
  89. constant_init(model, 0.142)
  90. constant_init(model2, 0.142)
  91. constant_init(model3, 0.142)
  92. print(model, model2, model3)
  93. pos_1 = to_tensor(
  94. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  95. )
  96. print(model(pos_1)[0].shape)
  97. print(model2(pos_1).shape)
  98. print(model3(pos_1).shape)
  99. def test_multi_dim_in():
  100. """description"""
  101. pos_size = (2, 3, 2)
  102. a_size = (2, 4, 5)
  103. model = MLP(input_shape=pos_size, output_shape=a_size)
  104. pos_1 = to_tensor(
  105. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  106. )
  107. pos_2 = to_tensor(
  108. numpy.random.rand(64, pos_size[1]), device="cpu", dtype=torch.float
  109. )
  110. pos_3 = to_tensor(
  111. numpy.random.rand(64, pos_size[2]), device="cpu", dtype=torch.float
  112. )
  113. heads = model(pos_1, pos_2, pos_3)
  114. for h in heads:
  115. print(h.shape)
  116. def test_multi_dim_out():
  117. """description"""
  118. pos_size = (10,)
  119. a_size = (2, 1)
  120. model = MLP(input_shape=pos_size, hidden_layers=(100,), output_shape=a_size)
  121. pos_1 = to_tensor(numpy.random.rand(64, *pos_size), device="cpu", dtype=torch.float)
  122. res = model(pos_1)
  123. print(model)
  124. print(len(res), res[0].shape, res[1].shape)
  125. def test_multi_dim_both():
  126. """description"""
  127. pos_size = (2, 3)
  128. a_size = (2, 4, 5)
  129. model = MLP(input_shape=pos_size, output_shape=a_size)
  130. pos_1 = to_tensor(
  131. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  132. )
  133. pos_2 = to_tensor(
  134. numpy.random.rand(64, pos_size[1]), device="cpu", dtype=torch.float
  135. )
  136. res = model(pos_1, pos_2)
  137. print(model)
  138. print(len(res), res[0].shape, res[1].shape, res[2].shape)
  139. def test_auto():
  140. """description"""
  141. pos_size = (4,)
  142. a_size = (2,)
  143. model = MLP(input_shape=pos_size, output_shape=a_size)
  144. pos_1 = to_tensor(
  145. numpy.random.rand(64, pos_size[0]), device="cpu", dtype=torch.float
  146. )
  147. res = model(pos_1)
  148. print(model)
  149. print(len(res), res[0].shape)
  150. if __name__ == "__main__":
  151. test_single_dim()
  152. test_hidden_dim()
  153. # test_multi_dim()