/* manual_KNN.cpp
 *
 * Copyright (C) 2007-2008 Ola Söder
 *
 * This code is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or (at
 * your option) any later version.
 *
 * This code is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this work. If not, see <http://www.gnu.org/licenses/>.
 */
/*
 * os 2008/05/29 Initial release
 * os 2009/01/23 Removed irrelevant information
 * pb 2010/12/28 lay-out, spelling
 */
#include "ManPagesM.h"
#include "KNN.h"

void manual_KNN_init (ManPages me);
void manual_KNN_init (ManPages me)
{

MAN_BEGIN (U"kNN classifiers", U"Ola Söder", 20080529)
INTRO (U"This tutorial describes the use of kNN classifiers in Praat.")
NORMAL (U"@@kNN classifiers 1. What is a kNN classifier?|1. What is a kNN classifier?@")
NORMAL (U"@@kNN classifiers 1.1. Improving classification accuracy|1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting|1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.1. Filter-based feature weighting|1.1.1.1. Filter-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting|1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection|1.1.2. Model selection@")
NORMAL (U"@@kNN classifiers 1.2. Improving resource efficiency|1.2. Improving resource efficiency@")
LIST_ITEM (U"@@kNN classifiers 1.2.1. Pruning|1.2.1. Pruning@")
NORMAL (U"@@kNN classifiers 2. Quick start|2. Quick start@")
NORMAL (U"@@kNN classifiers 3. Command overview|3. Command overview@")
MAN_END
MAN_BEGIN (U"KNN", U"Ola Söder", 20080529)
INTRO (U"One of the @@types of objects@ in Praat.")
NORMAL (U"A #KNN classifier can %learn associations between its %input and its %output. "
	"The @@kNN classifiers@ tutorial gives you an introduction to the usage of %%k%NN learners in Praat.")
MAN_END
MAN_BEGIN (U"FeatureWeights", U"Ola Söder", 20080729)
INTRO (U"One of the @@types of objects@ in Praat.")
NORMAL (U"A @FeatureWeights object is a %d-dimensional vector containing weight values used to transform a %d-dimensional space. "
	"Feature weighting can be used to improve the classification accuracy of @KNN classifiers. "
	"It can also be used to generate a @Dissimilarity matrix from a @PatternList object. "
	"@Dissimilarity matrices in conjunction with @@Multidimensional scaling|MDS analysis@ can aid the visualization of high-dimensional data.")
MAN_END
MAN_BEGIN (U"kNN classifiers 1. What is a kNN classifier?", U"Ola Söder", 20080529)
NORMAL (U"%%Instance-based% classifiers such as the %%k%NN classifier operate on the premise that classification of unknown instances "
	"can be done by relating the unknown to the known according to some @@Euclidean distance|distance/similarity function@. "
	"The intuition is that two instances far apart in the %%instance space% defined by the appropriate %%distance function% are less likely "
	"than two closely situated instances to belong to the same class.")
ENTRY (U"The learning process")
NORMAL (U"Unlike many artificial learners, %%instance-based% learners do not abstract any information from the training data during the learning phase. "
	"Learning is merely a question of encapsulating the training data. The process of %generalization is postponed until it is absolutely unavoidable, "
	"that is, at the time of %classification. This property has led to %%instance-based% learners being referred to as %lazy learners, "
	"whereas classifiers such as @@Feedforward neural networks 1. What is a feedforward neural network?|feedforward neural networks@, "
	"where proper %abstraction is done during the learning phase, are often called %eager learners.")
ENTRY (U"Classification")
NORMAL (U"Classification (%generalization) using an %%instance-based% classifier can be a simple matter of locating the nearest neighbour in %%instance space% "
	"and labelling the unknown instance with the same class label as that of the located (known) neighbour. "
	"This approach is often referred to as a %%nearest neighbour classifier%. "
	"The downside of this simple approach is the lack of robustness that characterizes the resulting classifiers. "
	"The high degree of local sensitivity makes %%nearest neighbour classifiers% highly susceptible to noise in the training data.")
NORMAL (U"More robust models can be achieved by locating %k, where %k > 1, neighbours and letting the majority vote decide the outcome of the class labelling. "
	"A higher value of %k results in a smoother, less locally sensitive, function. "
	"The %%nearest neighbour classifier% can be regarded as a special case of the more general %%k-nearest neighbours classifier%, "
	"hereafter referred to as a %%k%NN classifier. The drawback of increasing the value of %k is of course that as %k approaches %n, "
	"where %n is the size of the %%instance base%, the performance of the classifier will approach that of the most straightforward %%statistical baseline%, "
	"the assumption that all unknown instances belong to the class most frequently represented in the training data.")
NORMAL (U"This problem can be avoided by limiting the influence of distant instances. "
	"One way of doing so is to assign a weight to each vote, where the weight is a function of the distance between the unknown and the known instance. "
	"By letting each weight be defined by the inverse squared distance between the known and unknown instances, votes cast by distant instances "
	"will have very little influence on the decision process compared to instances in the near neighbourhood. "
	"%%Distance-weighted voting% usually serves as a good middle ground as far as local sensitivity is concerned.")
MAN_END
MAN_BEGIN (U"kNN classifiers 1.1. Improving classification accuracy", U"Ola Söder", 20080529)
NORMAL (U"This section describes possible ways of improving the classification accuracy of a %%k%NN classifier.")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting|1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection|1.1.2. Model selection@")
MAN_END
MAN_BEGIN (U"kNN classifiers 1.1.1. Feature weighting", U"Ola Söder", 20080529)
NORMAL (U"A %%k%NN classifier in its most basic form operates under the implicit assumption that all features are of equal value "
	"as far as the classification problem at hand is concerned. When irrelevant and noisy features influence the neighbourhood search "
	"to the same degree as highly relevant features, the accuracy of the model is likely to deteriorate. "
	"%%Feature weighting% is a technique used to approximate the optimal degree of influence of individual features using a training set. "
	"When successfully applied, relevant features are attributed a high weight value, whereas irrelevant features are given a weight value close to zero. "
	"%%Feature weighting% can be used not only to improve classification accuracy but also to discard features with weights below a certain threshold value "
	"and thereby increase the resource efficiency of the classifier.")
NORMAL (U"Two fundamentally different approaches to this optimization problem can be identified, the %%filter-based% and the %%wrapper-based%. "
	"The class of %%filter-based% methods contains algorithms that use no input other than the training data itself to calculate the %%feature weights%, "
	"whereas %%wrapper-based% algorithms use feedback from a classifier to guide the search. "
	"%%Wrapper-based% algorithms are inherently more powerful than their %%filter-based% counterparts, "
	"as they implicitly take the %%inductive bias% of the classifier into account. "
	"This power comes at a price, however: the usage of %%wrapper-based% algorithms increases the risk of %%overfitting% the training data.")
NORMAL (U"In @@kNN classifiers 1.1.1.1. Filter-based feature weighting|section 1.1.1.1.@ the %%filter-based% feature weighting algorithm implemented in Praat is presented. "
	"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting|Section 1.1.1.2.@ contains an account of the implemented %%wrapper-based% feature weighting algorithm.")
MAN_END
MAN_BEGIN (U"kNN classifiers 1.1.1.1. Filter-based feature weighting", U"Ola Söder", 20080529)
NORMAL (U"The %%filter-based% %%feature weighting% algorithm implemented in Praat is an extended version of the well-known R\\s{ELIEF} algorithm, "
	"R\\s{ELIEF-F}, such as it is presented (with one minor exception, see below) in @@Igor Kononenko (1994)|Kononenko (1994)@. "
	"Unlike the original R\\s{ELIEF} algorithm, the R\\s{ELIEF-F} algorithm copes with multi-class (more than two classes) data sets. "
	"The simple intuition behind the R\\s{ELIEF-F} algorithm is that a good feature is a feature with little within-class variance "
	"and generous amounts of between-class variance. A bad feature is characterized by within-class and between-class variances of roughly equal magnitude.")
NORMAL (U"The computation of the weight vector is done in an iterative fashion, with all weights initially set to 0. "
	"All features in the training set are normalized (all values are set within the range [0 ... 1]) and thereafter used to update the weight vector as follows: "
	"On each iteration a random instance is chosen. The nearest hit is located, where a hit is an instance of the same class as that of the randomly chosen instance. "
	"The nearest misses of all the classes but that of the randomly chosen instance are located, "
	"where a miss is an instance of a class different from that of the randomly chosen instance. "
	"Each weight is updated by subtracting the difference between the given attribute of the randomly chosen instance and that of the nearest hit, "
	"and adding the corresponding differences between the chosen instance and all the nearest misses, weighted by the prior probabilities of their classes. "
	"If the distance between the attribute of the randomly chosen instance and the nearest hit equals the corresponding value for the nearest miss(es), "
	"then the weight value will not change; it will thus remain 0 if the current iteration is the first one. "
	"If the difference between the attribute of the chosen instance and the nearest hit is lower than the corresponding value for the miss(es), "
	"then the weight value will be increased. On average, highly significant attributes will result in absolute values distinct from 0, "
	"leading to an absolute increase of the weight, whereas insignificant attributes will on average result in values near 0, "
	"retaining the pessimistic view that all features are of no value as predictors.")
NORMAL (U"The implementation of R\\s{ELIEF-F} found in Praat differs slightly from the algorithm described: instances are not randomly chosen; "
	"instead, all instances are used to update the weight vector. This simplification is of no concern unless massive data sets are used, "
	"in which case the Praat approach would be no less precise, but needlessly slow.")
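/*
	Illustrative sketch (a source comment only, not rendered in the manual): the core of the
	RELIEF-F weight update described above, in the deterministic all-instances variant that
	Praat uses, in plain C++. Features are assumed to have been normalized to [0 ... 1]
	beforehand; the caller is assumed to supply, for each instance, its nearest hit and the
	nearest miss of every other class together with that class's prior probability
	(Kononenko (1994) additionally normalizes these priors over the miss classes).
	All names here are hypothetical.

	#include <cmath>
	#include <cstddef>
	#include <utility>
	#include <vector>

	// One update of the weight vector `w` for a single training instance `x`,
	// averaged over the n instances of the training set.
	static void reliefFUpdate (const std::vector<double> & x,
		const std::vector<double> & nearestHit,
		const std::vector<std::pair<double, std::vector<double>>> & nearestMisses,   // (prior, features) per other class
		std::vector<double> & w, std::size_t n)
	{
		for (std::size_t f = 0; f < w.size (); f ++) {
			w [f] -= std::fabs (x [f] - nearestHit [f]) / n;   // penalize within-class differences
			for (const auto & miss : nearestMisses)
				w [f] += miss.first * std::fabs (x [f] - miss.second [f]) / n;   // reward between-class differences
		}
	}
*/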
MAN_END
MAN_BEGIN (U"kNN classifiers 1.1.1.2. Wrapper-based feature weighting", U"Ola Söder", 20090123)
NORMAL (U"The %%wrapper-based% %%feature weighting% algorithm implemented in Praat attempts to find the globally optimal feature weights "
	"by means of a greedy local search. The local neighbourhood is defined by a number of random seeds centered around a pivot seed. "
	"On each iteration of the algorithm, the best-performing seed is chosen to be the pivot of the next iteration. "
	"At the same time the maximum allowed distance between a seed and the pivot is decreased by a constant factor. "
	"Given a sufficient number of seeds and a small shrinkage factor, the algorithm will on average home in on a good approximation of the global maximum.")
NORMAL (U"Given reasonable parameter settings, the search algorithm on average gives good results without huge memory and CPU requirements. "
	"The strong point of the method, the ability to efficiently do a rather thorough search of the weight space, can, "
	"as mentioned in @@kNN classifiers 1.1.1. Feature weighting|section 1.1.1.@, lead to overfitting. "
	"One way of minimizing this problem is to constrain the algorithm by means of %%early stopping%. "
	"In Praat this is done in a very naive fashion, by simply not allowing the algorithm to explore weights "
	"achieving an accuracy higher than a specified threshold value.")
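/*
	Illustrative sketch (a source comment only, not rendered in the manual): the general
	shape of the greedy local search described above, in plain C++. The evaluation function
	(e.g. a cross-validated accuracy estimate), the number of seeds, the shrink factor and
	the iteration count are hypothetical parameters; Praat's actual implementation may
	differ in its details.

	#include <algorithm>
	#include <functional>
	#include <random>
	#include <vector>

	static std::vector<double> greedyLocalSearch (std::vector<double> pivot, double radius,
		double shrink, int numberOfSeeds, int numberOfIterations,
		const std::function<double (const std::vector<double> &)> & evaluate)
	{
		std::mt19937 rng (1);
		for (int iteration = 1; iteration <= numberOfIterations; iteration ++) {
			std::vector<double> best = pivot;
			double bestScore = evaluate (pivot);
			std::uniform_real_distribution<double> jitter (- radius, radius);
			for (int seed = 1; seed <= numberOfSeeds; seed ++) {
				std::vector<double> candidate = pivot;   // a random seed within `radius` of the pivot
				for (double & w : candidate)
					w = std::clamp (w + jitter (rng), 0.0, 1.0);
				double score = evaluate (candidate);
				if (score > bestScore) { bestScore = score; best = candidate; }
			}
			pivot = best;       // the best-performing seed becomes the next pivot
			radius *= shrink;   // the neighbourhood shrinks by a constant factor
		}
		return pivot;
	}
*/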
MAN_END
MAN_BEGIN (U"kNN classifiers 1.1.2. Model selection", U"Ola Söder", 20080529)
NORMAL (U"%%Model selection% is the process of choosing classifier parameters suitable for the classification task at hand. "
	"In most cases this is done manually, in an experimental fashion.")
NORMAL (U"The search for the optimal model can also be automated. In Praat this is done by means of the same "
	"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting|greedy local search algorithm@ used to search the weight space for feature weights. "
	"The %%model selection% search implementation in Praat lets the user limit the search space with respect to the parameter %k. "
	"By setting a maximum allowed value of %k, the search space can be shrunk considerably.")
NORMAL (U"Due to its discrete (%k) and nominal (vote weighting) nature, the size of the search space is normally of no concern, "
	"making an experimental/manual search tractable. The %%model selection% feature of Praat becomes an essential tool only when applied to "
	"huge instance bases where the expected optimal value of %k is high. In most cases, however, manual experimenting will suffice.")
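/*
	Illustrative sketch (a source comment only, not rendered in the manual): because the
	search space over %k is one-dimensional and discrete, an exhaustive scan up to a maximum
	k is usually tractable, which is why manual experimenting often suffices. `estimateAccuracy`
	is a hypothetical stand-in for, e.g., a leave-one-out evaluation of the classifier.

	#include <cstddef>
	#include <functional>

	static std::size_t bestK (std::size_t kMax,
		const std::function<double (std::size_t)> & estimateAccuracy)
	{
		std::size_t best = 1;
		double bestScore = estimateAccuracy (1);
		for (std::size_t k = 2; k <= kMax; k ++) {
			double score = estimateAccuracy (k);
			if (score > bestScore) { bestScore = score; best = k; }
		}
		return best;
	}
*/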
MAN_END
MAN_BEGIN (U"kNN classifiers 1.2. Improving resource efficiency", U"Ola Söder", 20080529)
NORMAL (U"%%Instance-based% learners such as the %%k%NN classifier implemented in Praat are fairly inefficient as far as CPU and memory usage is concerned. "
	"The performance can be improved by making sure that only those instances that are vital for the accuracy of the classifier are stored "
	"and that non-vital instances are disposed of. Praat gives the user the possibility to prune non-vital or harmful instances, "
	"making the resulting classifier less memory- and CPU-hungry and in some cases more accurate, "
	"even though that is not the primary objective of the pruning algorithm.")
MAN_END
MAN_BEGIN (U"kNN classifiers 1.2.1. Pruning", U"Ola Söder", 20080529)
NORMAL (U"Pruning is the process of discarding instances that do not improve upon the classification accuracy of the classifier. "
	"This group of instances includes noisy instances that, at best, make no difference as far as model accuracy is concerned and, at worst, induce classification errors. "
	"It also includes instances that are redundant: instances that are implied by the defined neighbourhood.")
ENTRY (U"The C-Pruner algorithm")
NORMAL (U"The C-Pruner algorithm such as it is presented in @@Ke-Ping Zhao et al. (2003)|Zhao et al. (2003)@ identifies pruning candidates "
	"and computes the order in which these candidates shall be removed. The ordering is of vital importance, since the removal of one candidate "
	"might disqualify other candidates, making them non-prunable. In order to understand how the C-Pruner algorithm operates, a few definitions are necessary:")
LIST_ITEM (U"\\bu The %k-reachability set of an instance %p consists of the %k nearest neighbours of %p.")
LIST_ITEM (U"\\bu The %k-coverage set of an instance %p consists of those instances of the same class as that of %p having %p as one of their %k nearest neighbours.")
LIST_ITEM (U"\\bu An instance %p is superfluous if it is implied by %k-reachability, that is, if it can be correctly classified using its %k-reachability set as instance base.")
LIST_ITEM (U"\\bu An instance %p is critical if at least one instance in the %k-coverage set of %p is not implied by the %k-reachability set of %p or, "
	"after %p is deleted, at least one instance in the %k-coverage set of %p is not implied by the %k-reachability set of %p.")
LIST_ITEM (U"\\bu An instance %p is noisy if %p isn't superfluous and the %k-reachability set of %p is bigger than the %k-coverage set of %p.")
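/*
	Illustrative sketch (a source comment only, not rendered in the manual): the definitions
	above expressed as predicates in plain C++. The helpers `kReachability`, `kCoverage` and
	`impliedBy` ("the instance is correctly classified using the given set as instance base")
	are hypothetical; `isCritical` is only declared here, since its test involves recomputing
	neighbourhoods after a deletion.

	#include <vector>

	struct Instance;
	using InstanceSet = std::vector<const Instance *>;

	InstanceSet kReachability (const Instance * p);   // the k nearest neighbours of p
	InstanceSet kCoverage (const Instance * p);       // same-class instances having p among their k nearest neighbours
	bool impliedBy (const Instance * q, const InstanceSet & base);
	bool isCritical (const Instance * p);             // as defined above

	bool isSuperfluous (const Instance * p) {
		return impliedBy (p, kReachability (p));
	}
	bool isNoisy (const Instance * p) {
		return ! isSuperfluous (p) && kReachability (p).size () > kCoverage (p).size ();
	}
*/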
NORMAL (U"Given these definitions, an instance is tagged for pruning if one of the following conditions holds: it is noisy, or it is superfluous but not critical. "
	"This translates to the discarding of instances that are bad class predictors (noise) and of instances that are highly typical of their class "
	"and thus are located close to the center of the cluster defining the given class. Instances located close to the class center are very likely "
	"implied by the surrounding border instances and thus redundant. In order to avoid destructive domino effects, it is important that the pruning "
	"starts close to the center of the cluster and works its way out, and not the other way around. To impose this ordering, the C-Pruner algorithm "
	"uses the following heuristics to determine the order of removal of two superfluous instances %p__%i_ and %p__%j_:")
LIST_ITEM (U"\\bu If the %k-reachability set of %p__%i_ contains more instances of the same class as that of %p__%i_ than the corresponding value for %p__%j_, "
	"then %p__%i_ should be removed before %p__%j_.")
LIST_ITEM (U"\\bu If the %k-reachability set of %p__%i_ contains the same number of instances of the same class as that of %p__%i_ as the corresponding value for %p__%j_, "
	"and the distance between %p__%i_ and the closest instance of a class different from the class of %p__%i_ is greater than the corresponding value for %p__%j_, "
	"then %p__%i_ should be removed before %p__%j_.")
LIST_ITEM (U"\\bu If the %k-reachability set of %p__%i_ contains the same number of instances of the same class as that of %p__%i_ as the corresponding value for %p__%j_, "
	"and the distance between %p__%i_ and the closest instance of a class different from the class of %p__%i_ equals the corresponding value for %p__%j_, "
	"then the order of removal is decided randomly.")
NORMAL (U"In order to gain control over the degree of pruning, the Praat implementation of the C-Pruner algorithm decides whether or not to prune "
	"a given instance tagged for pruning on a probabilistic basis. This makes it possible for the user to specify the hardness of the pruning process "
	"(e.g. 100 percent (exp.) noise, 50 percent (exp.) redundancy), so as to find a good compromise between model accuracy and resource requirements.")
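/*
	Illustrative sketch (a source comment only, not rendered in the manual): the
	removal-ordering heuristics above written as a comparator in plain C++.
	`sameClassNeighbours (p)` (the number of instances in the k-reachability set of p
	that share p's class) and `distanceToNearestEnemy (p)` (the distance from p to the
	closest instance of a different class) are hypothetical helpers.

	#include <random>

	struct Instance;
	int sameClassNeighbours (const Instance * p);
	double distanceToNearestEnemy (const Instance * p);

	// True if pi should be removed before pj.
	bool removeBefore (const Instance * pi, const Instance * pj, std::mt19937 & rng) {
		if (sameClassNeighbours (pi) != sameClassNeighbours (pj))
			return sameClassNeighbours (pi) > sameClassNeighbours (pj);   // more same-class neighbours: closer to the cluster center
		if (distanceToNearestEnemy (pi) != distanceToNearestEnemy (pj))
			return distanceToNearestEnemy (pi) > distanceToNearestEnemy (pj);
		return std::uniform_int_distribution<int> (0, 1) (rng) == 1;   // complete tie: decide randomly
	}
*/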
MAN_END
MAN_BEGIN (U"kNN classifiers 2. Quick start", U"Ola Söder", 20080809)
ENTRY (U"An example: Learning the Iris data set")
NORMAL (U"@@Feedforward neural networks|The feedforward neural networks tutorial@ contains a description of how the @FFNet classifier in Praat "
	"can be applied to @@iris data set|the Iris data set@.")
NORMAL (U"The same data can be used to test the %%k%NN feature of Praat. To do so, create an example data set using the @@Create iris example...@ command "
	"found in the ##Feedforward neural networks# submenu. The form prompting for network topology settings can be ignored by selecting OK. "
	"Select the newly created @PatternList and @Categories objects and click ##To KNN Classifier...#. "
	"A form prompting for a name of the classifier to be created will be shown. "
	"The ordering in which instances are to be inserted into the instance base can also be specified; "
	"make sure that #Random is selected, and thereafter close the form by selecting OK. "
	"The newly created and trained classifier will be shown in the list of objects.")
NORMAL (U"To estimate how well the classifier can be expected to classify new samples of irises, select ##Query -# \\=> ##Get accuracy estimate...#. "
	"A form prompting for %%k%NN parameter settings and evaluation method will be shown. "
	"Experiment with the parameter settings until satisfactory results are achieved. "
	"If everything worked out, the estimate will likely end up somewhere in the range of 94\\--98 percent.")
NORMAL (U"An alternative to manually experimenting with model parameters is to let the computer do the job. "
	"This is done by choosing the @KNN object and thereafter selecting ##Query -# \\=> ##Get optimized parameters...#. "
	"The form shown prompts for a selection of parameters controlling the search. The default values will in most cases, including this one, be appropriate.")
NORMAL (U"Another way of improving classification accuracy is to transform the instance space in which the individual instances, in this case irises, are stored, "
	"so as to maximize the distance between instances of different classes and minimize the distance between instances of the same class. "
	"This can be done by means of feature weighting. To do so, select the @KNN object and choose ##To FeatureWeights...#. "
	"Adjust the %%k%NN settings according to the ones found by the model search algorithm and let the remaining options retain their default values. Click OK. "
	"A @FeatureWeights object will be added to the objects list. The feature weights contained within the newly created object can be used "
	"by selecting that object in conjunction with the @KNN classifier and thereafter choosing the desired action.")
MAN_END
MAN_BEGIN (U"k-means clustering", U"Ola Söder", 20080529)
INTRO (U"This tutorial describes the use of %%k%-means clustering in Praat.")
NORMAL (U"@@k-means clustering 1. How does k-means clustering work?|1. How does k-means clustering work?@")
NORMAL (U"@@k-means clustering 2. Quick start|2. Quick start@")
MAN_END
MAN_BEGIN (U"k-means clustering 1. How does k-means clustering work?", U"Ola Söder", 20080529)
NORMAL (U"The %%k%-means clustering algorithm attempts to split a given anonymous data set (a set containing no information as to class identity) "
	"into a fixed number (%k) of clusters.")
NORMAL (U"Initially, %k so-called %centroids are chosen. A %centroid is a data point (imaginary or real) at the center of a cluster. "
	"In Praat each centroid is an existing data point in the given input data set, picked at random, such that all %centroids are unique "
	"(that is, for all %centroids %c__%i_ and %c__%j_, %c__%i_ \\=/ %c__%j_). These %centroids are used to train a "
	"@@kNN classifiers 1. What is a kNN classifier?|kNN classifier@. The resulting classifier is used to classify (using %k = 1) the data "
	"and thereby produce an initial randomized set of clusters. Each %centroid is thereafter set to the arithmetic mean of the cluster it defines. "
	"The process of classification and %centroid adjustment is repeated until the values of the %centroids stabilize. "
	"The final %centroids will be used to produce the final classification/clustering of the input data, "
	"effectively turning the set of initially anonymous data points into a set of data points, each with a class identity.")
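/*
	Illustrative sketch (a source comment only, not rendered in the manual): one
	classification/adjustment cycle of the k-means procedure described above, in plain C++;
	the cycle is repeated until the centroids stop moving. `squaredDistance` is the
	hypothetical helper from the classification sketch earlier in this file.

	#include <cstddef>
	#include <vector>

	double squaredDistance (const std::vector<double> & a, const std::vector<double> & b);

	static void kMeansStep (const std::vector<std::vector<double>> & data,
		std::vector<std::vector<double>> & centroids, std::vector<std::size_t> & cluster)
	{
		// Classification step: assign every point to its nearest centroid (k = 1).
		for (std::size_t i = 0; i < data.size (); i ++) {
			std::size_t nearest = 0;
			for (std::size_t c = 1; c < centroids.size (); c ++)
				if (squaredDistance (data [i], centroids [c]) < squaredDistance (data [i], centroids [nearest]))
					nearest = c;
			cluster [i] = nearest;
		}
		// Adjustment step: move every centroid to the arithmetic mean of its cluster.
		for (std::size_t c = 0; c < centroids.size (); c ++) {
			std::vector<double> mean (centroids [c].size (), 0.0);
			std::size_t count = 0;
			for (std::size_t i = 0; i < data.size (); i ++) {
				if (cluster [i] != c) continue;
				for (std::size_t f = 0; f < mean.size (); f ++)
					mean [f] += data [i] [f];
				count ++;
			}
			if (count > 0) {
				for (double & m : mean)
					m /= count;
				centroids [c] = mean;
			}
		}
	}
*/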
MAN_END
MAN_BEGIN (U"k-means clustering 2. Quick start", U"Ola Söder", 20080529)
NORMAL (U"Clustering using the %%k%-means clustering algorithm in Praat is done by selecting a @PatternList and choosing ##To Categories...#. "
	"In the form that appears, the number of sought-after clusters (unique categories) can be specified. "
	"The cluster size ratio constraint (%z) imposes a constraint on the output such that %%cluster size%(%x) / %%cluster size%(%y) > %z "
	"for all clusters %x and %y in the resulting set of clusters. Valid values of %z are 0 < %z \\<_ 1, "
	"where values near 0 impose practically no constraints on the cluster sizes and a value of 1 tells the algorithm to attempt to create clusters of equal size. "
	"The size ratio constraint is enforced in a very naive fashion, by random reseeding. Since this can be a rather time-consuming process, "
	"it is possible to set an upper bound on the number of reseeds done by the algorithm. "
	"This upper bound is defined by the parameter ##Maximum number of reseeds#. "
	"It should be noted, however, that normally there is no need to use the size ratio constraint: "
	"selecting the desired number of clusters will, on average, result in clusters of roughly equal size, given well-distributed data.")
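/*
	Illustrative sketch (a source comment only, not rendered in the manual): the naive
	reseeding loop that enforces the cluster size ratio constraint, in plain C++.
	`runKMeans` (one complete clustering from fresh random centroids) and `sizeRatio`
	(smallest cluster size divided by biggest cluster size) are hypothetical helpers.

	#include <cstddef>
	#include <vector>

	std::vector<std::size_t> runKMeans (const std::vector<std::vector<double>> & data, std::size_t k);
	double sizeRatio (const std::vector<std::size_t> & cluster, std::size_t k);

	static std::vector<std::size_t> clusterWithRatioConstraint (
		const std::vector<std::vector<double>> & data, std::size_t k, double z, long maximumNumberOfReseeds)
	{
		std::vector<std::size_t> cluster = runKMeans (data, k);
		for (long reseed = 1; reseed <= maximumNumberOfReseeds && sizeRatio (cluster, k) <= z; reseed ++)
			cluster = runKMeans (data, k);   // reseed: restart from new random centroids
		return cluster;
	}
*/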
MAN_END
MAN_BEGIN (U"PatternList to Dissimilarity", U"Ola Söder", 20080529)
NORMAL (U"A @Dissimilarity matrix can be used in conjunction with @@Multidimensional scaling@ to aid the visualization of high-dimensional data. "
	"A @Dissimilarity object is a matrix of the distances, according to the chosen @@Euclidean distance|distance function@, "
	"between all the data points in the @PatternList object.")
NORMAL (U"A @Dissimilarity object can be created by selecting a @PatternList object and choosing ##To Dissimilarity#. "
	"The dissimilarity matrix can also be computed using feature weights. "
	"This is done by selecting a @PatternList object and a @FeatureWeights object and choosing ##To Dissimilarity#.")
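/*
	Illustrative sketch (a source comment only, not rendered in the manual): computing a
	dissimilarity matrix from a pattern, scaled by feature weights, in plain C++. One common
	convention, assumed here, is to multiply each per-feature difference by its weight before
	squaring; Praat's exact convention may differ.

	#include <cmath>
	#include <cstddef>
	#include <vector>

	static std::vector<std::vector<double>> dissimilarity (
		const std::vector<std::vector<double>> & pattern, const std::vector<double> & weight)
	{
		std::size_t n = pattern.size ();
		std::vector<std::vector<double>> d (n, std::vector<double> (n, 0.0));
		for (std::size_t i = 0; i < n; i ++) {
			for (std::size_t j = i + 1; j < n; j ++) {
				double sum = 0.0;
				for (std::size_t f = 0; f < weight.size (); f ++) {
					double diff = weight [f] * (pattern [i] [f] - pattern [j] [f]);
					sum += diff * diff;
				}
				d [i] [j] = d [j] [i] = std::sqrt (sum);   // the matrix is symmetric with a zero diagonal
			}
		}
		return d;
	}
*/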
MAN_END
MAN_BEGIN (U"Euclidean distance", U"Ola Söder", 20080529)
NORMAL (U"The Euclidean distance between the %n-dimensional vectors %a and %b can be calculated as follows:")
FORMULA (U"\\De__Euclidean_(%a, %b) = \\Vr(\\su__%i=1..%n_(%a__%i_ - %b__%i_)^2)")
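/*
	Illustrative sketch (a source comment only, not rendered in the manual): the formula
	above as a plain C++ function.

	#include <cmath>
	#include <cstddef>
	#include <vector>

	static double euclideanDistance (const std::vector<double> & a, const std::vector<double> & b) {
		double sum = 0.0;
		for (std::size_t i = 0; i < a.size (); i ++)   // assumes a.size () == b.size () == n
			sum += (a [i] - b [i]) * (a [i] - b [i]);
		return std::sqrt (sum);
	}
*/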
MAN_END
MAN_BEGIN (U"kNN classifiers 3. Command overview", U"Ola Söder", 20080809)
INTRO (U"KNN commands")
ENTRY (U"Creation:")
LIST_ITEM (U"\\bu @@PatternList & Categories: To KNN classifier...@")
LIST_ITEM (U"\\bu @@Create KNN...@")
ENTRY (U"Learning:")
LIST_ITEM (U"\\bu @@KNN & PatternList & Categories: Learn...@")
ENTRY (U"Classification:")
LIST_ITEM (U"\\bu @@KNN & PatternList: To Categories...@")
LIST_ITEM (U"\\bu @@KNN & PatternList: To TableOfReal...@")
LIST_ITEM (U"\\bu @@KNN & PatternList & FeatureWeights: To Categories...@")
LIST_ITEM (U"\\bu @@KNN & PatternList & FeatureWeights: To TableOfReal...@")
ENTRY (U"Evaluation:")
LIST_ITEM (U"\\bu @@KNN & PatternList & Categories: Evaluate...@")
LIST_ITEM (U"\\bu @@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
ENTRY (U"Queries:")
LIST_ITEM (U"\\bu @@KNN: Get optimized parameters...@")
LIST_ITEM (U"\\bu @@KNN: Get accuracy estimate...@")
LIST_ITEM (U"\\bu @@KNN & FeatureWeights: Get accuracy estimate...@")
LIST_ITEM (U"\\bu @@KNN: Get size of instance base@")
ENTRY (U"Extractions:")
LIST_ITEM (U"\\bu @@KNN: Extract input Pattern@")
LIST_ITEM (U"\\bu @@KNN: Extract output Categories@")
ENTRY (U"Modification:")
LIST_ITEM (U"\\bu @@KNN: Shuffle@")
LIST_ITEM (U"\\bu @@KNN: Prune...@")
LIST_ITEM (U"\\bu @@KNN: Reset...@")
ENTRY (U"Miscellaneous:")
LIST_ITEM (U"\\bu @@KNN: To FeatureWeights...@")
LIST_ITEM (U"\\bu @@KNN & PatternList & Categories: To FeatureWeights...@")
LIST_ITEM (U"\\bu @@PatternList & Categories: To FeatureWeights...@")
ENTRY (U"Pre/post processing:")
LIST_ITEM (U"\\bu @@PatternList: To Categories...@")
LIST_ITEM (U"\\bu @@PatternList & FeatureWeights: To Categories...@")
LIST_ITEM (U"\\bu @@PatternList: To Dissimilarity...@")
LIST_ITEM (U"\\bu @@PatternList & FeatureWeights: To Dissimilarity...@")
MAN_END
MAN_BEGIN (U"PatternList & Categories: To KNN classifier...", U"Ola Söder", 20080726)
INTRO (U"Create and train a @KNN classifier using the selected @PatternList and @Categories objects as training data.")
ENTRY (U"Settings")
TAG (U"##Name")
DEFINITION (U"The name of the @KNN classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
MAN_END
MAN_BEGIN (U"Create KNN...", U"Ola Söder", 20080718)
INTRO (U"Create a new @KNN classifier.")
ENTRY (U"Settings")
TAG (U"##Name")
DEFINITION (U"The name of the classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & Categories: Learn...", U"Ola Söder", 20080726)
INTRO (U"Train the selected @KNN classifier using the chosen @PatternList and @Categories objects as training data.")
ENTRY (U"Settings")
TAG (U"##Name")
DEFINITION (U"The name of the classifier.")
TAG (U"##Ordering")
DEFINITION (U"The order in which the training instances are to be inserted into the instance base.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
MAN_END
MAN_BEGIN (U"KNN & PatternList: To Categories...", U"Ola Söder", 20080726)
INTRO (U"Use the selected @KNN classifier to classify the chosen @PatternList. A @Categories object containing the result will be created.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
MAN_END
MAN_BEGIN (U"KNN & PatternList: To TableOfReal...", U"Ola Söder", 20080718)
INTRO (U"Use the selected @KNN classifier to classify the chosen @PatternList. "
	"A @TableOfReal object containing verbose information on the decision process will be created.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
TAG (U"##Output")
DEFINITION (U"Specifies the degree of verbosity: %%winners only% generates a @TableOfReal containing information on the instances of the winning category only, "
	"whereas %%all candidates% results in a @TableOfReal with verbose information on all unique categories in the neighbourhood defined by the parameter %k.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & FeatureWeights: To Categories...", U"Ola Söder", 20080726)
INTRO (U"Use the selected @KNN classifier and @FeatureWeights object to classify the chosen @PatternList. "
	"A @Categories object containing the result will be created.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.1. Filter-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & FeatureWeights: To TableOfReal...", U"Ola Söder", 20080718)
INTRO (U"Use the selected @KNN classifier and the selected @FeatureWeights object to classify the chosen @PatternList. "
	"A @TableOfReal object containing verbose information on the decision process will be created.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
TAG (U"##Output")
DEFINITION (U"Specifies the degree of verbosity: %%winners only% generates a @TableOfReal containing information on the instances of the winning category only, "
	"whereas %%all candidates% results in a @TableOfReal with verbose information on all unique categories in the neighbourhood defined by the parameter %k.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.1. Filter-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Get optimized parameters...", U"Ola Söder", 20080718)
INTRO (U"Perform an automated search for the optimal @KNN parameter settings.")
ENTRY (U"Settings")
TAG (U"##Evaluation method")
DEFINITION (U"The method to be used for estimating the classification accuracy, which in turn is used to guide the model search. "
	"Supported methods are 10-fold cross-validation and leave-one-out.")
TAG (U"##k max")
DEFINITION (U"The maximum value of the parameter %k.")
TAG (U"##Number of seeds")
DEFINITION (U"The size of the parameter neighbourhood to be searched.")
TAG (U"##Learning rate")
DEFINITION (U"The rate at which the parameter neighbourhood is shrunk (per iteration).")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Get accuracy estimate...", U"Ola Söder", 20080718)
INTRO (U"Estimate the classification accuracy of the selected @KNN classifier.")
ENTRY (U"Settings")
TAG (U"##Evaluation method")
DEFINITION (U"The method to be used for estimating the classification accuracy. Supported methods are 10-fold cross-validation and leave-one-out.")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@KNN & FeatureWeights: Get accuracy estimate...@")
LIST_ITEM (U"@@KNN & PatternList & Categories: Evaluate...@")
LIST_ITEM (U"@@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & FeatureWeights: Get accuracy estimate...", U"Ola Söder", 20080809)
INTRO (U"Estimate the classification accuracy of the @KNN classifier using the chosen @FeatureWeights.")
ENTRY (U"Settings")
TAG (U"##Evaluation method")
DEFINITION (U"The method to be used for estimating the classification accuracy. "
	"Supported methods are 10-fold cross-validation and leave-one-out.")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@KNN & PatternList & Categories: Evaluate...@")
LIST_ITEM (U"@@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & Categories: Evaluate...", U"Ola Söder", 20080718)
INTRO (U"Estimate the classification accuracy of the chosen @KNN classifier using the selected @PatternList and @Categories objects as test set.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
LIST_ITEM (U"@@KNN: Get accuracy estimate...@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & Categories & FeatureWeights: Evaluate...", U"Ola Söder", 20080718)
INTRO (U"Estimate the classification accuracy of the chosen @KNN classifier using the selected @PatternList and @Categories objects as test set. "
	"The selected @FeatureWeights object will be used in the classification process.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@KNN & PatternList & Categories: Evaluate...@")
LIST_ITEM (U"@@KNN: Get accuracy estimate...@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.2. Model selection@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Get size of instance base", U"Ola Söder", 20080718)
INTRO (U"Queries the selected @KNN for the number of instances in the instance base.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Extract input Pattern", U"Ola Söder", 20080726)
INTRO (U"Create a new @PatternList object identical to the one in the instance base of the selected @KNN classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Extract output Categories", U"Ola Söder", 20080726)
INTRO (U"Create a new @Categories object identical to the one in the instance base of the selected @KNN classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Shuffle", U"Ola Söder", 20080718)
INTRO (U"Shuffle the instance base of the selected classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Prune...", U"Ola Söder", 20080718)
INTRO (U"Prune the instance base of the selected classifier.")
ENTRY (U"Settings")
TAG (U"##Noise pruning degree")
DEFINITION (U"A value in the range [0 ... 1] specifying the probability that instances tagged as noisy by "
	"@@kNN classifiers 1.2.1. Pruning|the C-Pruner algorithm@ will be pruned.")
TAG (U"##Redundancy pruning degree")
DEFINITION (U"The probability that instances tagged as redundant by @@kNN classifiers 1.2.1. Pruning|the C-Pruner algorithm@ will be pruned.")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.2.1. Pruning@")
LIST_ITEM (U"@@kNN classifiers 1.2. Improving resource efficiency@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: Reset...", U"Ola Söder", 20080718)
INTRO (U"Empty the instance base of the selected classifier.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN: To FeatureWeights...", U"Ola Söder", 20080728)
INTRO (U"Wrap the selected @KNN and use its feedback to guide the search for the optimal feature weights. A @FeatureWeights object will be created.")
ENTRY (U"Settings")
TAG (U"##Learning rate")
DEFINITION (U"The rate at which the maximum distance between the pivot and a random seed is decremented.")
TAG (U"##Number of seeds")
DEFINITION (U"The size of the feature weight neighbourhood.")
TAG (U"##Stop at")
DEFINITION (U"A value specifying a stopping criterion: when feature weights yielding an accuracy estimate higher than the specified value are found, "
	"the search stops. A value of 1 imposes no constraint, whereas a value of 0.5 will result in the termination of the search algorithm "
	"once feature weights resulting in a classification accuracy of 50 percent or better are found.")
TAG (U"##Optimization")
DEFINITION (U"Specifies whether to search for all features simultaneously or one at a time.")
TAG (U"##Evaluation method")
DEFINITION (U"The method to be used for estimating the classification accuracy. Supported methods are 10-fold cross-validation and leave-one-out.")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood used for feedback classification.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"KNN & PatternList & Categories: To FeatureWeights...", U"Ola Söder", 20080809)
INTRO (U"Wrap the selected @KNN and use its classification accuracy on the test set constituted by the @PatternList and @Categories objects "
	"as feedback to guide the search for the optimal feature weights. A @FeatureWeights object will be created.")
ENTRY (U"Settings")
TAG (U"##Learning rate")
DEFINITION (U"The rate at which the maximum distance between the pivot and a random seed is decremented.")
TAG (U"##Number of seeds")
DEFINITION (U"The size of the feature weight neighbourhood.")
TAG (U"##Stop at")
DEFINITION (U"A value specifying a stopping criterion: when feature weights yielding an accuracy estimate higher than the specified value are found, "
	"the search stops. A value of 1 imposes no constraint, whereas a value of 0.5 will result in the termination of the search algorithm "
	"once feature weights resulting in a classification accuracy of 50 percent or better are found.")
TAG (U"##Optimization")
DEFINITION (U"Specifies whether to search for all features simultaneously or one at a time.")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood used for feedback classification.")
TAG (U"##Vote weighting")
DEFINITION (U"The type of vote weighting to be used.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1. Improving classification accuracy@")
LIST_ITEM (U"@@KNN & PatternList & Categories & FeatureWeights: Evaluate...@")
LIST_ITEM (U"@@kNN classifiers 1. What is a kNN classifier?@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"PatternList & Categories: To FeatureWeights...", U"Ola Söder", 20080809)
INTRO (U"Compute an estimate of the optimal feature weights using the @@kNN classifiers 1.1.1.1. Filter-based feature weighting|RELIEF-F algorithm@.")
ENTRY (U"Settings")
TAG (U"##k neighbours")
DEFINITION (U"The size of the neighbourhood.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers 1.1.1.1. Filter-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers 1.1.1.2. Wrapper-based feature weighting@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"PatternList: To Categories...", U"Ola Söder", 20080728)
INTRO (U"Split the given @PatternList into a fixed number of clusters using the @@k-means clustering|%%k%-means clustering algorithm@. "
	"A @Categories object containing numbered categories corresponding to the generated clusters will be created.")
ENTRY (U"Settings")
TAG (U"##k clusters")
DEFINITION (U"The number of clusters to be generated.")
TAG (U"##Cluster size ratio constraint")
DEFINITION (U"The minimum allowed ratio between the smallest and the biggest cluster.")
TAG (U"##Maximum number of reseeds")
DEFINITION (U"The maximum allowed number of reseeds used to enforce the cluster size ratio constraint.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"PatternList & FeatureWeights: To Categories...", U"Ola Söder", 20080728)
INTRO (U"Split the given @PatternList into a fixed number of clusters using the @@k-means clustering|%%k%-means clustering algorithm@ "
	"and the feature weights contained within the selected @FeatureWeights object. "
	"A @Categories object containing numbered categories corresponding to the generated clusters will be created.")
ENTRY (U"Settings")
TAG (U"##k clusters")
DEFINITION (U"The number of clusters to be generated.")
TAG (U"##Cluster size ratio constraint")
DEFINITION (U"The minimum allowed ratio between the smallest and the biggest cluster.")
TAG (U"##Maximum number of reseeds")
DEFINITION (U"The maximum allowed number of reseeds used to enforce the cluster size ratio constraint.")
ENTRY (U"See also:")
LIST_ITEM (U"@@kNN classifiers@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
MAN_END
MAN_BEGIN (U"PatternList: To Dissimilarity...", U"Ola Söder", 20080718)
INTRO (U"Generate a @Dissimilarity matrix from the selected @PatternList. Dissimilarities are computed using the @@Euclidean distance@.")
ENTRY (U"See also:")
LIST_ITEM (U"@@Multidimensional scaling@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"PatternList & FeatureWeights: To Dissimilarity...", U"Ola Söder", 20080718)
INTRO (U"Generate a @Dissimilarity matrix from the selected @PatternList using the feature weights contained within the selected @FeatureWeights object. "
	"Dissimilarities are computed using the @@Euclidean distance@.")
ENTRY (U"See also:")
LIST_ITEM (U"@@Multidimensional scaling@")
LIST_ITEM (U"@@kNN classifiers 1.1.1. Feature weighting@")
LIST_ITEM (U"@@kNN classifiers@")
MAN_END
MAN_BEGIN (U"Ke-Ping Zhao et al. (2003)", U"Ola Söder", 20080718)
NORMAL (U"Ke-Ping Zhao et al. (2003): \"C-Pruner: An improved instance pruning algorithm.\" "
	"In %%Proceedings of the Second International Conference on Machine Learning and Cybernetics%, November 2003, 94\\--99.")
MAN_END
MAN_BEGIN (U"Igor Kononenko (1994)", U"Ola Söder", 20080718)
NORMAL (U"Igor Kononenko (1994): \"Estimating attributes: Analysis and extensions of RELIEF.\" "
	"In %%ECML-94: Proceedings of the European Conference on Machine Learning%, Secaucus, NJ, USA, 171\\--182. "
	"New York: Springer.")
MAN_END
}

/* End of file manual_KNN.cpp */