@article{Shinmura_2015,
  author   = {Shinmura, Shuichi},
  title    = {A Trivial Linear Discriminant Function},
  journal  = {Statistics, Optimization \& Information Computing},
  volume   = {3},
  number   = {4},
  pages    = {322--335},
  year     = {2015},
  month    = nov,
  doi      = {10.19139/soic.v3i4.151},
  url      = {http://www.iapress.org/index.php/soic/article/view/20151202},
  abstract = {In this paper, we focus on the new model selection procedure of the discriminant analysis. Combining re-sampling technique with k-fold cross validation, we develop a k-fold cross validation for small sample method. By this breakthrough, we obtain the mean error rate in the validation samples (M2) and the 95\% confidence interval (CI) of discriminant coefficient. Moreover, we propose the model selection procedure in which the model having a minimum M2 was chosen to the best model. We apply this new method and procedure to the pass/fail determination of exam scores. In this case, we fix the constant = 1 for seven linear discriminant functions (LDFs) and several good results were obtained as follows: 1) M2 of Fisher's LDF are over 4.6\% worse than Revised IP-OLDF. 2) A soft-margin SVM for penalty c = 1 (SVM1) is worse than another mathematical programming (MP) based LDFs and logistic regression. 3) The 95\% CI of the best discriminant coefficients was obtained. Seven LDFs except for Fisher's LDF are almost the same as a trivial LDF for the linear separable model. Furthermore, if we choose the median of the coefficient of seven LDFs except for Fisher's LDF, those are almost the same as the trivial LDF for the linear separable model.},
}