]> git.donarmstrong.com Git - mothur.git/blob - randomforest.cpp
adding pretty confusion matrix
[mothur.git] / randomforest.cpp
1 //
2 //  randomforest.cpp
3 //  Mothur
4 //
5 //  Created by Sarah Westcott on 10/2/12.
6 //  Copyright (c) 2012 Schloss Lab. All rights reserved.
7 //
8
9 #include "randomforest.hpp" 
10
11 /***********************************************************************/
12
// Constructor: forwards every configuration value straight to the Forest
// base class, which owns the data set, tree-count, split-criterion and
// pruning settings. This class adds no state of its own beyond the
// MothurOut singleton handle used for logging/interrupt checks.
// NOTE(review): the data set is taken by const value (a full copy) and the
// defaults here appear on the definition — presumably the declaration in
// randomforest.hpp omits them; confirm before changing either.
RandomForest::RandomForest(const vector <vector<int> > dataSet,
                           const int numDecisionTrees,
                           const string treeSplitCriterion = "gainratio",
                           const bool doPruning = false,
                           const float pruneAggressiveness = 0.9,
                           const bool discardHighErrorTrees = true,
                           const float highErrorTreeDiscardThreshold = 0.4,
                           const string optimumFeatureSubsetSelectionCriteria = "log2",
                           const float featureStandardDeviationThreshold = 0.0)
            : Forest(dataSet, numDecisionTrees, treeSplitCriterion, doPruning, pruneAggressiveness, discardHighErrorTrees, highErrorTreeDiscardThreshold, optimumFeatureSubsetSelectionCriteria, featureStandardDeviationThreshold) {
    m = MothurOut::getInstance();
}
25
26 /***********************************************************************/
27 // DONE
28 int RandomForest::calcForrestErrorRate() {
29     try {
30         int numCorrect = 0;
31         for (map<int, vector<int> >::iterator it = globalOutOfBagEstimates.begin(); it != globalOutOfBagEstimates.end(); it++) {
32             
33             if (m->control_pressed) { return 0; }
34             
35             int indexOfSample = it->first;
36             vector<int> predictedOutComes = it->second;
37             vector<int>::iterator maxPredictedOutComeIterator = max_element(predictedOutComes.begin(), predictedOutComes.end());
38             int majorityVotedOutcome = (int)(maxPredictedOutComeIterator - predictedOutComes.begin());
39             int realOutcome = dataSet[indexOfSample][numFeatures];
40                                    
41             if (majorityVotedOutcome == realOutcome) { numCorrect++; }
42         }
43         
44         // TODO: save or return forrestErrorRate for future use;
45         double forrestErrorRate = 1 - ((double)numCorrect / (double)globalOutOfBagEstimates.size());
46         
47         m->mothurOut("numCorrect = " + toString(numCorrect)+ "\n");
48         m->mothurOut("forrestErrorRate = " + toString(forrestErrorRate)+ "\n");
49             
50         return 0;
51     }
52         catch(exception& e) {
53                 m->errorOut(e, "RandomForest", "calcForrestErrorRate");
54                 exit(1);
55         } 
56 }
57 /***********************************************************************/
58
59 int RandomForest::printConfusionMatrix(map<int, string> intToTreatmentMap) {
60     try {
61         int numGroups = intToTreatmentMap.size();
62         vector<vector<int> > cm(numGroups, vector<int>(numGroups, 0));
63         
64         for (map<int, vector<int> >::iterator it = globalOutOfBagEstimates.begin(); it != globalOutOfBagEstimates.end(); it++) {
65             
66             if (m->control_pressed) { return 0; }
67             
68             int indexOfSample = it->first; //key
69             vector<int> predictedOutComes = it->second; //value, vector of all predicted classes
70             vector<int>::iterator maxPredictedOutComeIterator = max_element(predictedOutComes.begin(), predictedOutComes.end());
71             int majorityVotedOutcome = (int)(maxPredictedOutComeIterator - predictedOutComes.begin());
72             int realOutcome = dataSet[indexOfSample][numFeatures];                       
73             cm[realOutcome][majorityVotedOutcome] = cm[realOutcome][majorityVotedOutcome] + 1;
74         }
75         
76         vector<int> fw;
77         for (int w = 0; w <numGroups; w++) {
78             fw.push_back(intToTreatmentMap[w].length());
79         }
80         
81         m->mothurOut("confusion matrix:\n\t\t");
82         for (int k = 0; k < numGroups; k++) {
83             //m->mothurOut(intToTreatmentMap[k] + "\t");
84             cout << setw(fw[k]) << intToTreatmentMap[k] << "\t";
85         }
86         for (int i = 0; i < numGroups; i++) {
87             cout << "\n" << setw(fw[i]) << intToTreatmentMap[i] << "\t";
88             //m->mothurOut("\n" + intToTreatmentMap[i] + "\t");
89             if (m->control_pressed) { return 0; }
90             for (int j = 0; j < numGroups; j++) {
91                 //m->mothurOut(toString(cm[i][j]) + "\t");
92                 cout << setw(fw[i]) << cm[i][j] << "\t";
93             }    
94         }
95         //m->mothurOut("\n");
96         cout << "\n";
97
98         return 0;
99     }
100     
101     catch(exception& e) {
102                 m->errorOut(e, "RandomForest", "printConfusionMatrix");
103                 exit(1);
104         }
105 }
106
107 /***********************************************************************/
108 int RandomForest::calcForrestVariableImportance(string filename) {
109     try {
110     
111         // follow the link: http://en.wikipedia.org/wiki/Dynamic_cast
112         //if you are going to dynamically cast, aren't you undoing the advantage of abstraction. Why abstract at all?
113         //could cause maintenance issues later if other types of Abstract decison trees are created that cannot be cast as a decision tree.
114         for (int i = 0; i < decisionTrees.size(); i++) {
115             if (m->control_pressed) { return 0; }
116             
117             DecisionTree* decisionTree = dynamic_cast<DecisionTree*>(decisionTrees[i]);
118             
119             for (int j = 0; j < numFeatures; j++) {
120                 globalVariableImportanceList[j] += (double)decisionTree->variableImportanceList[j];
121             }
122         }
123         
124         for (int i = 0;  i < numFeatures; i++) {
125             globalVariableImportanceList[i] /= (double)numDecisionTrees;
126         }
127         
128         vector< pair<int, double> > globalVariableRanks;
129         for (int i = 0; i < globalVariableImportanceList.size(); i++) {
130             //cout << "[" << i << ',' << globalVariableImportanceList[i] << "], ";
131             if (globalVariableImportanceList[i] > 0) {
132                 pair<int, double> globalVariableRank(0, 0.0);
133                 globalVariableRank.first = i;
134                 globalVariableRank.second = globalVariableImportanceList[i];
135                 globalVariableRanks.push_back(globalVariableRank);
136             }
137         }
138         
139 //        for (int i = 0; i < globalVariableRanks.size(); i++) {
140 //            cout << m->currentBinLabels[(int)globalVariableRanks[i][0]] << '\t' << globalVariableImportanceList[globalVariableRanks[i][0]] << endl;
141 //        }
142
143         
144         VariableRankDescendingSorterDouble variableRankDescendingSorter;
145         sort(globalVariableRanks.begin(), globalVariableRanks.end(), variableRankDescendingSorter);
146         
147         ofstream out;
148         m->openOutputFile(filename, out);
149         out <<"OTU\tMean decrease accuracy\n";
150         for (int i = 0; i < globalVariableRanks.size(); i++) {
151             out << m->currentBinLabels[(int)globalVariableRanks[i].first] << '\t' << globalVariableImportanceList[globalVariableRanks[i].first] << endl;
152         }
153         out.close();
154         return 0;
155     }
156         catch(exception& e) {
157                 m->errorOut(e, "RandomForest", "calcForrestVariableImportance");
158                 exit(1);
159         }  
160 }
161 /***********************************************************************/
// Builds the forest: constructs numDecisionTrees DecisionTrees, optionally
// prunes each one, records its out-of-bag votes, and either keeps the tree
// (decisionTrees) or deletes it when discardHighErrorTrees is set and its
// error rate is too high. Also logs the average pruning improvement.
// Returns 0 always (also on user interrupt via control_pressed).
int RandomForest::populateDecisionTrees() {
    try {
        
        // per-kept-tree relative error-rate improvement from pruning
        vector<double> errorRateImprovements;
        
        for (int i = 0; i < numDecisionTrees; i++) {
          
            if (m->control_pressed) { return 0; }
            // progress message every 100 trees
            if (((i+1) % 100) == 0) {  m->mothurOut("Creating " + toString(i+1) + " (th) Decision tree\n");  }
          
            // TODO: need to first fix if we are going to use pointer based system or anything else
            DecisionTree* decisionTree = new DecisionTree(dataSet, globalDiscardedFeatureIndices, OptimumFeatureSubsetSelector(optimumFeatureSubsetSelectionCriteria), treeSplitCriterion, featureStandardDeviationThreshold);
          
            if (m->debug && doPruning) {
                m->mothurOut("Before pruning\n");
                decisionTree->printTree(decisionTree->rootNode, "ROOT");
            }
            
            // out-params filled by calcTreeErrorRate below
            int numCorrect;
            double treeErrorRate;
            
            decisionTree->calcTreeErrorRate(numCorrect, treeErrorRate);
            double prePrunedErrorRate = treeErrorRate;
            
            if (m->debug) {
                m->mothurOut("treeErrorRate: " + toString(treeErrorRate) + " numCorrect: " + toString(numCorrect) + "\n");
            }
            
            if (doPruning) {
                decisionTree->pruneTree(pruneAggressiveness);
                if (m->debug) {
                    m->mothurOut("After pruning\n");
                    decisionTree->printTree(decisionTree->rootNode, "ROOT");
                }
                // recompute error rate on the pruned tree
                decisionTree->calcTreeErrorRate(numCorrect, treeErrorRate);
            }
            // equals prePrunedErrorRate when pruning is disabled
            double postPrunedErrorRate = treeErrorRate;
            
          
            decisionTree->calcTreeVariableImportanceAndError(numCorrect, treeErrorRate);
            // relative improvement; NOTE(review): divides by
            // prePrunedErrorRate — NaN/inf if a tree starts at 0 error
            double errorRateImprovement = (prePrunedErrorRate - postPrunedErrorRate) / prePrunedErrorRate;

            if (m->debug) {
                m->mothurOut("treeErrorRate: " + toString(treeErrorRate) + " numCorrect: " + toString(numCorrect) + "\n");
                if (doPruning) {
                    m->mothurOut("errorRateImprovement: " + toString(errorRateImprovement) + "\n");
                }
            }
            
            
            // keep-or-discard: a kept tree contributes its out-of-bag votes,
            // drops its training data copy, and joins the forest; a
            // discarded tree is deleted (sole ownership lives here)
            if (discardHighErrorTrees) {
                if (treeErrorRate < highErrorTreeDiscardThreshold) {
                    updateGlobalOutOfBagEstimates(decisionTree);
                    decisionTree->purgeDataSetsFromTree();
                    decisionTrees.push_back(decisionTree);
                    if (doPruning) {
                        errorRateImprovements.push_back(errorRateImprovement);
                    }
                } else {
                    delete decisionTree;
                }
            } else {
                updateGlobalOutOfBagEstimates(decisionTree);
                decisionTree->purgeDataSetsFromTree();
                decisionTrees.push_back(decisionTree);
                if (doPruning) {
                    errorRateImprovements.push_back(errorRateImprovement);
                }
            }          
        }
        
        // mean improvement across kept trees; -1 marks "no data"
        double avgErrorRateImprovement = -1.0;
        if (errorRateImprovements.size() > 0) {
            avgErrorRateImprovement = accumulate(errorRateImprovements.begin(), errorRateImprovements.end(), 0.0);
//            cout << "Total " << avgErrorRateImprovement << " size " << errorRateImprovements.size() << endl;
            avgErrorRateImprovement /= errorRateImprovements.size();
        }
        
        if (m->debug && doPruning) {
            m->mothurOut("avgErrorRateImprovement:" + toString(avgErrorRateImprovement) + "\n");
        }
        // m->mothurOut("globalOutOfBagEstimates = " + toStringVectorMap(globalOutOfBagEstimates)+ "\n");

        
        return 0;
    }
    catch(exception& e) {
        m->errorOut(e, "RandomForest", "populateDecisionTrees");
        exit(1);
    }  
}
253 /***********************************************************************/
254 // TODO: need to finalize bettween reference and pointer for DecisionTree [partially solved]
255 // DONE: make this pure virtual in superclass
256 // DONE
257 int RandomForest::updateGlobalOutOfBagEstimates(DecisionTree* decisionTree) {
258     try {
259         for (map<int, int>::iterator it = decisionTree->outOfBagEstimates.begin(); it != decisionTree->outOfBagEstimates.end(); it++) {
260             
261             if (m->control_pressed) { return 0; }
262             
263             int indexOfSample = it->first;
264             int predictedOutcomeOfSample = it->second;
265             
266             if (globalOutOfBagEstimates.count(indexOfSample) == 0) {
267                 globalOutOfBagEstimates[indexOfSample] = vector<int>(decisionTree->numOutputClasses, 0);
268             };
269             
270             globalOutOfBagEstimates[indexOfSample][predictedOutcomeOfSample] += 1;
271         }
272         return 0;
273     }
274     catch(exception& e) {
275         m->errorOut(e, "RandomForest", "updateGlobalOutOfBagEstimates");
276         exit(1);
277     }  
278 }
279 /***********************************************************************/
280
281