\relax \citation{NB-performance} \@writefile{toc}{\contentsline {section}{\numberline {1}Building the Experiment}{3}} \@writefile{toc}{\contentsline {subsection}{\numberline {1.1}Number of Attributes}{3}} \@writefile{toc}{\contentsline {subsection}{\numberline {1.2}Classifiers}{3}} \citation{quinlanC4.5} \citation{quinlanID3} \citation{Mitchell97DescTree} \citation{holte93} \newlabel{eq:bayesTheorem}{{1}{4}} \citation{Freund99thealternating} \citation{Heckerman96atutorial} \@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces A decision tree consists of a root node and descending children nodes that denote decisions to make in the tree's structure. This tree, for example, was constructed in an attempt to optimize investment portfolios by minimizing budgets and maximizing payoffs. The top-most branch represents the best selection in this example.}}{5}} \newlabel{fig:decisionTree}{{1}{5}} \citation{rbfnIntroBors} \@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces In this simple Bayesian network, the variable \emph{Sprinkler} is dependent upon whether or not it's raining; the sprinkler is generally not turned on when it's raining. However, either event is able to cause the grass to become wet---if it's raining, or if the sprinkler is caused to turn on. 
Thus, Bayesian networks excel at investigating information relating to relationships between variables.}}{6}} \newlabel{fig:bayesnetwork}{{2}{6}} \citation{Hall00correlation-basedfeature} \citation{Mitchell97} \citation{Moore06} \@writefile{toc}{\contentsline {subsection}{\numberline {1.3}Feature Subset Selectors}{7}} \@writefile{toc}{\contentsline {subsection}{\numberline {1.4}Cross-Validation}{8}} \@writefile{toc}{\contentsline {section}{\numberline {2}Analysis of Experimental Results}{9}} \@writefile{toc}{\contentsline {subsection}{\numberline {2.1}Evaluation Metrics}{9}} \@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Visualizing the Results}{9}} \@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Probability of Detection (PD) and Probability of False Alarm (PF) with variances for first year retention.}}{10}} \newlabel{fig:ret1graphs}{{3}{10}} \@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces Probability of Detection (PD) and Probability of False Alarm (PF) with variances for second year retention.}}{11}} \newlabel{fig:ret2graphs}{{4}{11}} \@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces Probability of Detection (PD) and Probability of False Alarm (PF) with variances for third year retention.}}{12}} \newlabel{fig:ret3graphs}{{5}{12}} \@writefile{toc}{\contentsline {subsection}{\numberline {2.3}Narrowing the Search}{13}} \@writefile{toc}{\contentsline {subsubsection}{\numberline {2.3.1}Ranking with the Mann-Whitney Test}{13}} \bibstyle{plain} \@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces The top ten ranking treatments for third year retention. 
Ranks represent how many times a particular treatment wins over all other treatments in the experiment.}}{14}} \newlabel{fig:ranktable}{{6}{14}} \@writefile{toc}{\contentsline {subsection}{\numberline {2.4}Selected FSS and Classifier}{14}} \bibdata{refs.bib} \bibcite{rbfnIntroBors}{1} \bibcite{Moore06}{2} \bibcite{Freund99thealternating}{3} \bibcite{Hall00correlation-basedfeature}{4} \bibcite{Heckerman96atutorial}{5} \bibcite{holte93}{6} \bibcite{Mitchell97DescTree}{7} \bibcite{Mitchell97}{8} \bibcite{quinlanID3}{9} \bibcite{quinlanC4.5}{10} \bibcite{NB-performance}{11}