/* manual_FFNet.cpp
 *
 * Copyright (C) 1994-2013, 2016 David Weenink
 *
 * This code is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or (at
 * your option) any later version.
 *
 * This code is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this work. If not, see <http://www.gnu.org/licenses/>.
 */

/*
 djmw 20020408 GPL
*/

#include "ManPagesM.h"
#include "FFNet.h"
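
// Draws the example network used in the tutorial picture: 3 inputs, one hidden
// layer of 4 units (the second hidden-layer field is left at 0), and 5 output units.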
static void drawFFNet_345 (Graphics g)
{
	autoFFNet me = FFNet_create (3, 4, 0, 5, false);
	FFNet_drawTopology (me.get(), g);
}
void manual_FFNet_init (ManPages me);
void manual_FFNet_init (ManPages me)
{
MAN_BEGIN (U"epoch", U"djmw", 20040428)
INTRO (U"A term that is often used in the context of machine learning. An epoch is one complete "
	"presentation of the %%data set to be learned% to a learning machine.")
NORMAL (U"Learning machines like @@FFNet|feedforward neural nets@ that use iterative algorithms "
	"often need many epochs during their learning phase.")
NORMAL (U"A @@Discriminant|discriminant classifier@ is also a learning machine. "
	"However, in contrast with neural nets a discriminant classifier only needs one epoch to learn.")
MAN_END
MAN_BEGIN (U"Feedforward neural networks", U"djmw", 20040511)
INTRO (U"This tutorial describes the use of @FFNet feedforward neural networks in P\\s{RAAT}. ")
NORMAL (U"@@Feedforward neural networks 1. What is a feedforward neural network?|1. What is a feedforward neural network?@")
LIST_ITEM (U" @@Feedforward neural networks 1.1. The learning phase|1.1 The learning phase@")
LIST_ITEM (U" @@Feedforward neural networks 1.2. The classification phase|1.2 The classification phase@")
NORMAL (U"@@Feedforward neural networks 2. Quick start|2. Quick start@")
NORMAL (U"@@Feedforward neural networks 3. FFNet versus discriminant classifier|3. FFNet versus discriminant classifier@")
NORMAL (U"@@Feedforward neural networks 4. Command overview|4. Command overview@")
MAN_END
MAN_BEGIN (U"Feedforward neural networks 1. What is a feedforward neural network?", U"djmw", 20040426)
INTRO (U"A feedforward neural network is a biologically inspired classification algorithm. "
	"It consists of a (possibly large) number of simple neuron-like processing %units, organized in %layers. "
	"Every unit in a layer is connected with all the units in the previous layer. "
	"These connections are not all equal: each connection may have a different strength or %weight. "
	"The weights on these connections encode the knowledge of a network. "
	"Often the units in a neural network are also called %nodes.")
NORMAL (U"Data enters at the inputs and passes through the network, layer by layer, until it arrives at the outputs. "
	"During normal operation, that is when it acts as a classifier, there is no feedback between layers. "
	"This is why they are called %%feedforward% neural networks. ")
NORMAL (U"In the following figure we see an example of a 2-layered network with, from top to bottom, "
	"an output layer with 5 units and a %hidden layer with 4 units. The network has 3 input units.")
PICTURE (5, 5, drawFFNet_345)
NORMAL (U"The 3 inputs are shown as circles and these do not belong to any layer of the network (although the inputs "
	"sometimes are considered as a virtual layer with layer number 0). Any layer that is not an output layer is a "
	"%hidden layer. This network therefore has 1 hidden layer and 1 output layer. The figure also shows all the "
	"connections between the units in different layers. A layer only connects to the previous layer. ")
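NORMAL (U"For the network in the figure this means 12 connections (3 times 4) between the inputs and the hidden layer, "
	"and 20 connections (4 times 5) between the hidden layer and the output layer, each with its own weight.")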
NORMAL (U"The operation of this network can be divided into two phases:")
NORMAL (U"@@Feedforward neural networks 1.1. The learning phase|1. The learning phase@")
NORMAL (U"@@Feedforward neural networks 1.2. The classification phase|2. The classification phase@")
MAN_END
MAN_BEGIN (U"Feedforward neural networks 1.1. The learning phase", U"djmw", 20040428)
INTRO (U"During the learning phase the weights in the FFNet will be modified. "
	"All weights are modified in such a way that when a pattern is presented, "
	"the output unit with the correct category, hopefully, will have the largest output value.")
ENTRY (U"How does learning take place?")
NORMAL (U"The FFNet uses a %supervised learning algorithm: besides the input pattern, "
	"the neural net also needs to know to what category the pattern belongs. "
	"Learning proceeds as follows: a pattern is presented at the inputs. "
	"The pattern will be transformed in its passage through the layers of the network until it "
	"reaches the output layer. The units in the output layer each correspond to a different category. "
	"The outputs of the network as they are now are compared with the outputs as they ideally would "
	"have been if this pattern were correctly classified: in the latter case "
	"the unit with the correct category would have had the largest output value and the "
	"output values of the other output units would have been very small. "
	"On the basis of this comparison all the connection weights are modified a little bit to guarantee that, the next time "
	"this same pattern is presented at the inputs, the value of the output unit that corresponds with the correct category "
	"is a little bit higher than it is now and that, at the same time, the output values of all the other incorrect outputs are a "
	"little bit lower than they are now. (The differences between the actual outputs and the idealized outputs "
	"are propagated back from the top layer to lower layers to be used at these layers to modify connection weights. "
	"This is why the term %%backpropagation network% is also often used to describe this type of neural network.)")
NORMAL (U"If you perform the procedure above once for every pattern and category pair in your data "
	"set you have performed one @epoch of learning.")
NORMAL (U"The hope is that eventually, probably after many epochs, "
	"the neural net will come to remember these pattern-category pairs. "
	"You even hope that the neural net, when the learning phase has terminated, will be able to %generalize, "
	"i.e. that it has learned to "
	"@@FFNet & PatternList: To Categories...|classify@ correctly any unknown pattern presented to it. ")
NORMAL (U"Because real-life data often contains noise as well as partly contradictory information, "
	"these hopes can be fulfilled only partly. ")
NORMAL (U"For @@FFNet & PatternList & Categories: Learn...|learning@ you "
	"need to select three different objects together: a FFNet (the %classifier), "
	"a PatternList (the %inputs) and a Categories (the %%correct outputs%).")
ENTRY (U"How long will the learning phase take?")
NORMAL (U"In general, this question is hard to answer. How much computing time the learning phase takes "
	"depends on the size of the neural network, the number of patterns to be learned, the number of epochs, "
	"the tolerance of the minimizer and the speed of your computer. ")
NORMAL (U"If computing time becomes excessive in your interactive environment then consider using the "
	"powerful @@Scripting|scripting@ facilities in Praat to process your learning job as a batch job. ")
MAN_END
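
/*
	A standalone sketch in plain C++ (not the FFNet implementation) of the error-driven
	weight update described above, for a single sigmoid output unit and a squared-error
	cost; the vector layout, the helper names and the learning rate are assumptions made
	for this illustration only. Praat's actual minimizers are described under
	"FFNet & PatternList & Categories: Learn..." and "Learn slow...".

	#include <cmath>
	#include <vector>

	static double sigmoid (double x) { return 1.0 / (1.0 + std::exp (-x)); }

	// One update step for one output unit: w holds the unit's weights, the last element is the bias;
	// `desired` is 1.0 if this unit represents the correct category of the pattern, 0.0 otherwise.
	static void updateOutputUnit (std::vector<double> & w, const std::vector<double> & pattern,
		double desired, double learningRate)
	{
		double net = w.back ();   // start with the bias
		for (std::size_t j = 0; j < pattern.size (); j ++)
			net += w [j] * pattern [j];
		const double out = sigmoid (net);
		// Gradient of 0.5 * (out - desired)^2 with respect to the net input.
		const double delta = (out - desired) * out * (1.0 - out);
		for (std::size_t j = 0; j < pattern.size (); j ++)
			w [j] -= learningRate * delta * pattern [j];   // nudge each weight a little bit
		w.back () -= learningRate * delta;                 // and the bias
	}
*/
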
MAN_BEGIN (U"Feedforward neural networks 1.2. The classification phase", U"djmw", 20040428)
INTRO (U"In the classification phase, the weights of the network are fixed. ")
NORMAL (U"A pattern, presented at the inputs, will be transformed from layer to layer until it reaches the output layer. "
	"Now classification can occur by selecting the category associated with the output unit that has "
	"the largest output value. "
	"For classification we only need to select an FFNet and a PatternList together and "
	"choose @@FFNet & PatternList: To Categories...|To Categories...@. ")
NORMAL (U"In contrast to the @@Feedforward neural networks 1.1. The learning phase|learning phase@, classification is very fast.")
MAN_END
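
/*
	A standalone sketch in plain C++ (not the FFNet implementation) of the classification
	step described above: propagate a pattern through one layer of sigmoid units and pick
	the category of the unit with the largest output. The weight layout and helper names
	are assumptions made for this illustration only.

	#include <cmath>
	#include <vector>

	static double sigmoid (double x) { return 1.0 / (1.0 + std::exp (-x)); }

	// weights [k] holds the weights of output unit k; the last element of each row is the bias.
	static std::size_t classify (const std::vector<std::vector<double>> & weights,
		const std::vector<double> & pattern)
	{
		std::size_t winner = 0;
		double best = -1.0;   // sigmoid outputs are always positive
		for (std::size_t k = 0; k < weights.size (); k ++) {
			double net = weights [k].back ();
			for (std::size_t j = 0; j < pattern.size (); j ++)
				net += weights [k][j] * pattern [j];
			const double out = sigmoid (net);
			if (out > best) {
				best = out;
				winner = k;
			}
		}
		return winner;   // index of the output unit, i.e. the assigned category
	}
*/
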
MAN_BEGIN (U"Feedforward neural networks 2. Quick start", U"djmw", 20040426)
INTRO (U"You may create the iris example set with the @@Create iris example...@ command "
	"that you will find under the ##Feedforward neural networks# option in the #New menu. "
	"Three new objects will appear in the @@List of Objects@: a @FFNet, a @Categories and "
	"a @PatternList.")
NORMAL (U"The #PatternList contains the @@iris data set@ in a matrix of 150 rows by 4 columns. "
	"To guarantee that every cell in the PatternList is in the [0,1] interval, all measurement "
	"values were divided by 10. In the #Categories the three iris species %setosa, "
	"%versicolor, and %virginica were categorized with the numbers #1, #2 and #3, respectively. "
	"Because there are 4 data columns in the PatternList and 3 different iris species in the Categories, "
	"the newly created #FFNet has 4 inputs and 3 outputs. "
	"If you enter a positive number in one of the fields in the form, the FFNet will have "
	"this number of units in a %%hidden layer%. The name of the newly created FFNet "
	"will reflect its topology. If you opt for the standard setting, which is 0 hidden units, the FFNet will be named 4-3.")
ENTRY (U"Learning the iris data")
NORMAL (U"The first thing you might want to do is to let the #FFNet learn the association in "
	"each pattern-category pair. To do this select all three objects together and choose "
	"@@FFNet & PatternList & Categories: Learn...|Learn...@. "
	"A form will appear, asking you to supply some settings for "
	"the learning algorithm. Learning starts after you have clicked the OK button. "
	"As the example network has only a small number of weights that need to be adjusted, "
	"and the learning data set is very small, this will only take a very short time.")
ENTRY (U"Classification")
NORMAL (U"Now, if you are curious how well the FFNet has learned the iris data, you may select the "
	"#FFNet and the #PatternList together and choose @@FFNet & PatternList: To Categories...|To Categories...@. "
	"A new #Categories appears in the ##List of Objects# with the name %%4-3_iris% (if %%4-3% was the name of the FFNet and %iris% the name of the PatternList). "
	"We now have two different Categories in the list of objects: the topmost one has the original categories, the other "
	"the categories as assigned by the FFNet classifier. The obvious thing to do now is to compare the "
	"original categories with the assigned categories by making a @@Confusion|confusion table@. "
	"Select the two #Categories and choose @@Categories: To Confusion|To Confusion@, and a newly "
	"created @Confusion appears. Pressing the @Info button will show you an info window with, "
	"among other things, the fraction correct. ")
NORMAL (U"You may also want to "
	"@@Feedforward neural networks 3. FFNet versus discriminant classifier|compare the FFNet classifier with a discriminant classifier@.")
ENTRY (U"Create other neural net topologies")
NORMAL (U"With a #PatternList and a #Categories selected together, you can for example create a new #FFNet of a different topology.")
MAN_END
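
/*
	A standalone sketch in plain C++ of how the fraction correct reported in the
	Confusion info window can be computed: the number of patterns on the diagonal of
	the confusion table divided by the total number of patterns. The matrix layout is
	an assumption made for this illustration only.

	#include <vector>

	static double fractionCorrect (const std::vector<std::vector<double>> & confusion) {
		double onDiagonal = 0.0, total = 0.0;
		for (std::size_t i = 0; i < confusion.size (); i ++)
			for (std::size_t j = 0; j < confusion [i].size (); j ++) {
				total += confusion [i][j];
				if (i == j)
					onDiagonal += confusion [i][j];
			}
		return total > 0.0 ? onDiagonal / total : 0.0;
	}
*/
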
MAN_BEGIN (U"Feedforward neural networks 3. FFNet versus discriminant classifier", U"djmw", 20040426)
NORMAL (U"You may want to compare the FFNet classifier with a discriminant classifier. "
	"Unlike the FFNet, a @@Discriminant|discriminant@ classifier does not need any iterative procedure in the "
	"learning phase and can be used immediately after creation for classification. "
	"The following three simple steps will give you the confusion matrix based on discriminant analysis:")
LIST_ITEM (U"1. Select the PatternList and the Categories together and choose ##To Discriminant#. "
	"A newly created Discriminant will appear.")
LIST_ITEM (U"2. Select the Discriminant and the PatternList together and choose ##To Categories...#. A newly created @Categories will appear.")
LIST_ITEM (U"3. Select the two appropriate Categories and choose @@Categories: To Confusion|To Confusion@. "
	"A newly created @Confusion will appear. After pushing the @Info button, the info window will "
	"show you the fraction correct.")
NORMAL (U"See also the @@Discriminant analysis@ tutorial for more information.")
MAN_END
MAN_BEGIN (U"Feedforward neural networks 4. Command overview", U"djmw", 20040426)
INTRO (U"FFNet commands")
ENTRY (U"Creation:")
LIST_ITEM (U"\\bu @@PatternList & Categories: To FFNet...@")
LIST_ITEM (U"\\bu @@Create FFNet...@")
ENTRY (U"Learning:")
LIST_ITEM (U"\\bu @@FFNet & PatternList & Categories: Learn...@")
LIST_ITEM (U"\\bu @@FFNet & PatternList & Categories: Learn slow...@")
ENTRY (U"Classification:")
LIST_ITEM (U"\\bu @@FFNet & PatternList: To Categories...@")
ENTRY (U"Drawing:")
LIST_ITEM (U"\\bu @@FFNet: Draw topology@")
LIST_ITEM (U"\\bu @@FFNet: Draw weights...@")
LIST_ITEM (U"\\bu @@FFNet: Draw cost history...@")
ENTRY (U"Queries:")
LIST_ITEM (U"\\bu @@FFNet & PatternList & Categories: Get total costs...@")
LIST_ITEM (U"\\bu @@FFNet & PatternList & Categories: Get average costs...@")
LIST_ITEM (U"\\bu @@FFNet & PatternList & ActivationList: Get total costs...@")
LIST_ITEM (U"\\bu @@FFNet & PatternList & ActivationList: Get average costs...@")
ENTRY (U"Analysis:")
LIST_ITEM (U"\\bu ##FFNet & PatternList: To ActivationList...#")
ENTRY (U"Modification:")
LIST_ITEM (U"\\bu @@FFNet: Reset...@")
LIST_ITEM (U"\\bu ##FFNet: Select biases...#")
LIST_ITEM (U"\\bu ##FFNet: Select all weights#")
MAN_END
MAN_BEGIN (U"FFNet", U"djmw", 19961015)
INTRO (U"One of the @@types of objects@ in P\\s{RAAT}.")
NORMAL (U"A #FFNet models a feedforward neural net. A feedforward "
	"neural net can %learn associations between its %input and its %output. "
	"The @@Feedforward neural networks@ tutorial gives you an introduction to feedforward neural nets.")
MAN_END
MAN_BEGIN (U"FFNet: Draw topology", U"djmw", 19970218)
INTRO (U"You can choose this command after selecting one or more @FFNet objects.")
ENTRY (U"Behaviour")
NORMAL (U"Draws all units and all connections of a feedforward neural net.")
MAN_END
MAN_BEGIN (U"FFNet: Draw weights...", U"djmw", 20040422)
INTRO (U"Draws the weights in a layer of the selected @FFNet feedforward neural net.")
ENTRY (U"Settings")
TAG (U"##Layer number")
DEFINITION (U"determines the layer.")
TAG (U"##Garnish")
DEFINITION (U"determines whether additional information is drawn.")
ENTRY (U"Behaviour")
NORMAL (U"The weights are arranged in a matrix. The columns of this matrix are indexed by the units in the layer, "
	"while the rows are indexed by the units in the previous layer. There is one extra row for the biases. "
	"The values of the weights are shown as rectangles. The area of a rectangle is proportional "
	"to the value. Negative values are shown as filled black rectangles.")
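NORMAL (U"For example, for the 3-4-5 network of the tutorial the weight matrix of layer 1 has 4 columns "
	"(the hidden units) and 3 + 1 rows (the three inputs plus the bias).")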
MAN_END
MAN_BEGIN (U"FFNet: Draw cost history...", U"djmw", 19970218)
INTRO (U"You can choose this command after selecting one or more @FFNet objects.")
ENTRY (U"Settings")
TAG (U"##Iteration range")
DEFINITION (U"determines the horizontal range of the plot.")
TAG (U"##Cost range")
DEFINITION (U"determines the vertical range of the plot.")
TAG (U"##Garnish")
DEFINITION (U"determines whether a box and axis labels are drawn.")
ENTRY (U"Behaviour")
NORMAL (U"Draws the history of the cost versus iteration number during previous learning.")
MAN_END
MAN_BEGIN (U"FFNet: Extract weights...", U"djmw", 20040422)
INTRO (U"Extracts all the weights of all the units in the specified layer of the selected "
	"@FFNet into a @TableOfReal.")
ENTRY (U"Settings")
TAG (U"##Layer number")
DEFINITION (U"determines the layer.")
ENTRY (U"Behaviour")
NORMAL (U"The weights will be arranged in the TableOfReal as follows: ")
NORMAL (U"The table columns will be indexed by the unit numbers in the selected layer, "
	"while the rows will be indexed by the unit numbers from the previous layer. "
	"There will be one extra row to accommodate the bias weights. "
	"The rows and columns are labelled with layer number and unit number as \"L%i-%j\", where %i is the layer number and "
	"%j the unit number from that layer. The layer number for the rows is one less than the layer number in the columns. "
	"The last row is labelled as \"Bias\".")
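NORMAL (U"For example, when layer 2 of a 3-4-5 network is extracted, the columns are labelled \"L2-1\" to \"L2-5\", "
	"and the rows \"L1-1\" to \"L1-4\" plus a final \"Bias\" row.")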
MAN_END
MAN_BEGIN (U"FFNet: Get number of outputs", U"djmw", 20040420)
INTRO (U"Queries the selected @FFNet for the number of output units in the output layer. ")
MAN_END
MAN_BEGIN (U"FFNet: Get number of inputs", U"djmw", 20040420)
INTRO (U"Queries the selected @FFNet for the number of inputs. ")
NORMAL (U"For a network with only one layer, the inputs are connected directly to the output layer. "
	"In a two-layer network the inputs are connected to a hidden layer.")
MAN_END
MAN_BEGIN (U"FFNet: Get number of hidden units...", U"djmw", 20040420)
INTRO (U"Queries the selected @FFNet for the number of units in a hidden layer.")
ENTRY (U"Settings")
TAG (U"##Hidden layer number")
DEFINITION (U"determines the layer that is queried.")
ENTRY (U"Layer numbering")
NORMAL (U"The number of hidden layers is always one less than the total number of layers in a FFNet. "
	"A network with the output units connected to the inputs therefore has only 1 layer, the output layer and "
	"no hidden layers. ")
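NORMAL (U"The 3-4-5 network of the tutorial, for example, has 2 layers: hidden layer 1 with 4 units and the output layer with 5 units.")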
MAN_END
MAN_BEGIN (U"FFNet: Get number of hidden weights...", U"djmw", 20040420)
INTRO (U"Queries the selected @FFNet for the number of weights in a hidden layer.")
ENTRY (U"Settings")
TAG (U"##Hidden layer number")
DEFINITION (U"determines the layer that is queried.")
MAN_END
MAN_BEGIN (U"FFNet: Reset...", U"djmw", 20040420)
INTRO (U"You can choose this command after selecting one or more @FFNet objects.")
ENTRY (U"WARNING")
NORMAL (U"This command destroys all previous learning.")
ENTRY (U"Settings")
TAG (U"##Range")
DEFINITION (U"determines the upper limit of the [-%range, +%range] interval from "
	"which new weights will be randomly selected.")
ENTRY (U"Behaviour")
NORMAL (U"All (selected) weights are reset to random numbers uniformly drawn from the interval [-%range, +%range]. "
	"This command also clears the cost history.")
MAN_END
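
/*
	A standalone sketch in plain C++ of the reset behaviour described above: every
	selected weight is replaced by a number drawn uniformly from [-range, +range].
	The flat weight vector and the use of std::mt19937 are assumptions made for this
	illustration only.

	#include <random>
	#include <vector>

	static void resetWeights (std::vector<double> & weights, double range, std::mt19937 & rng) {
		std::uniform_real_distribution<double> draw (-range, range);
		for (double & w : weights)
			w = draw (rng);   // a fresh random weight; previous learning is lost
	}
*/
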
MAN_BEGIN (U"FFNet: Select biases...", U"djmw", 20040422)
INTRO (U"Selects only the biases in one particular layer as subject for modification during learning of the @FFNet.")
ENTRY (U"Settings")
TAG (U"##Layer number")
DEFINITION (U"determines the layer whose biases will be modified.")
ENTRY (U"Behaviour")
NORMAL (U"This command induces very specific behaviour during a following learning phase. "
	"Instead of all the weights, only the biases in the specified layer will be changed during learning and the "
	"rest of the weights stay fixed. ")
MAN_END
#define FFNet_Create_COMMON_HELP_INOUT \
	ENTRY (U"Settings") \
	TAG (U"##Number of inputs") \
	DEFINITION (U"the dimension of the input of the neural net.") \
	TAG (U"##Number of outputs (\\>_ 1)#") \
	DEFINITION (U"the number of different categories that you want the net to learn.")
#define FFNet_Create_COMMON_HELP_HIDDEN \
	TAG (U"##Number of units in hidden layer 1#, ##Number of units in hidden layer 2#") \
	DEFINITION (U"determine the number of units in the hidden layers. " \
		"If you want a neural net with no hidden layers, both numbers have to be 0. " \
		"If you want a neural net with only 1 hidden layer, then only one of these numbers should differ from 0. ")
MAN_BEGIN (U"Create FFNet...", U"djmw", 20040420)
INTRO (U"Create a new feedforward neural net of type @FFNet.")
FFNet_Create_COMMON_HELP_INOUT
FFNet_Create_COMMON_HELP_HIDDEN
MAN_END
MAN_BEGIN (U"Create FFNet (linear outputs)...", U"djmw", 20040422)
INTRO (U"Create a @FFNet feedforward neural network whose output units are linear.")
FFNet_Create_COMMON_HELP_INOUT
FFNet_Create_COMMON_HELP_HIDDEN
MAN_END
MAN_BEGIN (U"Create iris example...", U"djmw", 20160524)
INTRO (U"A @FFNet feedforward neural net will be created together with two other objects: "
	"a @PatternList and a @Categories. The PatternList will contain the observations in the @@iris data set@, "
	"and the Categories will contain the 3 different iris species categorized by numbers.")
ENTRY (U"Settings")
FFNet_Create_COMMON_HELP_HIDDEN
NORMAL (U"For this simple data set you can leave both hidden layers empty.")
MAN_END
MAN_BEGIN (U"iris data set", U"djmw", 19961015)
NORMAL (U"A data set with 150 random samples of flowers from the iris species %setosa, "
	"%versicolor, and %virginica collected by @@Anderson (1935)@. From each species there are 50 observations for "
	"sepal length, sepal width, petal length, and petal width in cm. This dataset was "
	"used by @@Fisher (1936)@ in his initiation of the linear-discriminant-function technique.")
MAN_END
MAN_BEGIN (U"FFNet: Categories", U"djmw", 19960918)
INTRO (U"The categories for training a neural net with a @PatternList. ")
ENTRY (U"Preconditions")
NORMAL (U"The number of categories in a @Categories must equal the number of rows in #PatternList.")
MAN_END
MAN_BEGIN (U"ActivationList", U"djmw", 20160524)
INTRO (U"A list of activations, organized as a @Matrix whose elements should be \\>_ 0 and \\<_ 1. "
	"Classification: the response of a particular layer in a neural net to a @PatternList. "
	"Learning: the desired response of the output layer in a neural net to a @PatternList.")
MAN_END
MAN_BEGIN (U"FFNet: Principal components", U"djmw", 19960918)
INTRO (U"When you select a @FFNet and an @Eigen together, the decision planes of layer 1 are drawn in the PC-plane.")
MAN_END
MAN_BEGIN (U"FFNet & PatternList: To Categories...", U"djmw", 19960918)
INTRO (U"The @FFNet is used as a classifier. Each pattern from the @PatternList will be "
	"classified into one of the FFNet's categories.")
MAN_END
MAN_BEGIN (U"PatternList & Categories: To FFNet...", U"djmw", 20040422)
INTRO (U"Create a new @FFNet feedforward neural network. "
	"The number of inputs of the newly created FFNet will be equal to the number of "
	"columns in the @PatternList and the number of outputs "
	"will be equal to the number of unique categories in the @Categories.")
ENTRY (U"Settings")
FFNet_Create_COMMON_HELP_HIDDEN
MAN_END
MAN_BEGIN (U"FFNet & PatternList & Categories: Learn slow...", U"djmw", 19960918)
INTRO (U"To learn an association you have to select a @FFNet, a @PatternList and a @Categories object.")
ENTRY (U"Preconditions")
LIST_ITEM (U"The number of columns in a #PatternList must equal the number of input units of #FFNet.")
LIST_ITEM (U"The number of rows in a #PatternList must equal the number of categories in a #Categories.")
LIST_ITEM (U"The number of unique categories in a #Categories must equal the number of output units in #FFNet.")
ENTRY (U"Algorithm")
NORMAL (U"Steepest descent")
MAN_END
MAN_BEGIN (U"FFNet & PatternList & Categories: Learn...", U"djmw", 20040511)
INTRO (U"You can choose this command after selecting one @PatternList, one @Categories and one @FFNet.")
ENTRY (U"Settings")
TAG (U"##Maximum number of epochs")
DEFINITION (U"the maximum number of times that the complete #PatternList dataset will be presented to the neural net.")
TAG (U"##Tolerance of minimizer")
DEFINITION (U"when the difference in costs between two successive learning cycles is "
	"smaller than this value, the minimization process will be stopped.")
TAG (U"##Cost function")
LIST_ITEM (U"Minimum-squared-error:")
LIST_ITEM (U" %costs = \\su__%allPatterns_ \\su__%allOutputs_ (%o__%k_ - %d__%k_)^2, where")
LIST_ITEM (U" %o__%k_ : actual output of unit %k")
LIST_ITEM (U" %d__%k_ : desired output of unit %k")
LIST_ITEM (U"Minimum-cross-entropy:")
LIST_ITEM (U" %costs = - \\su__%allPatterns_ \\su__%allOutputs_ (%d__%k_ \\.c ln %o__%k_ + (1-%d__%k_) \\.c ln (1-%o__%k_))")
ENTRY (U"Algorithm")
NORMAL (U"The minimization procedure is a variant of conjugate gradient minimization; "
	"see for example @@Press et al. (1992)@, chapter 10, or @@Nocedal & Wright (1999)@, chapter 5.")
MAN_END
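
/*
	A standalone sketch in plain C++ (not the FFNet implementation) of the two cost
	functions listed above, evaluated for a single pattern; the totals used during
	learning are these values summed over all patterns. The vector layout and function
	names are assumptions made for this illustration only.

	#include <cmath>
	#include <vector>

	// Minimum-squared-error cost for one pattern: sum over all outputs of (o_k - d_k)^2.
	static double minimumSquaredError (const std::vector<double> & actual, const std::vector<double> & desired) {
		double cost = 0.0;
		for (std::size_t k = 0; k < actual.size (); k ++) {
			const double difference = actual [k] - desired [k];
			cost += difference * difference;
		}
		return cost;
	}

	// Minimum-cross-entropy cost for one pattern; the actual outputs must lie strictly between 0 and 1.
	static double minimumCrossEntropy (const std::vector<double> & actual, const std::vector<double> & desired) {
		double cost = 0.0;
		for (std::size_t k = 0; k < actual.size (); k ++)
			cost -= desired [k] * std::log (actual [k]) + (1.0 - desired [k]) * std::log (1.0 - actual [k]);
		return cost;
	}
*/
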
MAN_BEGIN (U"FFNet & PatternList & Categories: Get total costs...", U"djmw", 20041118)
INTRO (U"Query the selected @FFNet, @PatternList and @Categories for the total costs.")
ENTRY (U"Algorithm")
NORMAL (U"All patterns are propagated and the total costs are calculated as is shown in @@FFNet & PatternList & Categories: Learn...@. ")
MAN_END
MAN_BEGIN (U"FFNet & PatternList & ActivationList: Get total costs...", U"djmw", 20160524)
INTRO (U"Query the selected @FFNet, @PatternList and @ActivationList for the total costs.")
ENTRY (U"Algorithm")
NORMAL (U"All patterns are propagated and the total costs are calculated as is shown in @@FFNet & PatternList & Categories: Learn...@. ")
MAN_END
MAN_BEGIN (U"FFNet & PatternList & Categories: Get average costs...", U"djmw", 20041118)
INTRO (U"Query the selected @FFNet, @PatternList and @Categories for the average costs.")
ENTRY (U"Algorithm")
NORMAL (U"All patterns are propagated and the total costs are calculated as is shown in @@FFNet & PatternList & Categories: Learn...@. "
	"These total costs are then divided by the number of patterns.")
MAN_END
MAN_BEGIN (U"FFNet & PatternList & ActivationList: Get average costs...", U"djmw", 20160526)
INTRO (U"Query the selected @FFNet, @PatternList and @ActivationList for the average costs.")
ENTRY (U"Algorithm")
NORMAL (U"All patterns are propagated and the total costs are calculated as is shown in @@FFNet & PatternList & Categories: Learn...@. "
	"These total costs are then divided by the number of patterns.")
MAN_END
MAN_BEGIN (U"Anderson (1935)", U"djmw", 20040423)
NORMAL (U"E. Anderson (1935): \"The irises of the Gasp\\e' peninsula.\" "
	"%%Bulletin of the American Iris Society% #59: 2\\--5.")
MAN_END
MAN_BEGIN (U"Fisher (1936)", U"djmw", 19980114)
NORMAL (U"R.A. Fisher (1936): \"The use of multiple measurements in taxonomic "
	"problems.\" %%Annals of Eugenics% #7: 179\\--188.")
MAN_END
MAN_BEGIN (U"Nocedal & Wright (1999)", U"djmw", 20040511)
NORMAL (U"J. Nocedal & S.J. Wright (1999): %%Numerical optimization.% Springer.")
MAN_END
}

/* End of file manual_FFNet.cpp */