X-Git-Url: https://git.donarmstrong.com/?a=blobdiff_plain;f=cooccurrencecommand.cpp;h=e4c915d6a14e4a9755a17d64c073fdda1440803b;hb=36a6b02cf7f09d2bc34376b588944a9ca73429c5;hp=8c763e275e5d9cebf885794fafdc3c9f44f0cb6a;hpb=d2390ede25cc211f95f99e377d5654bad7e6950e;p=mothur.git

diff --git a/cooccurrencecommand.cpp b/cooccurrencecommand.cpp
index 8c763e2..e4c915d 100644
--- a/cooccurrencecommand.cpp
+++ b/cooccurrencecommand.cpp
@@ -180,7 +180,7 @@ int CooccurrenceCommand::execute(){
         m->openOutputFile(outputFileName, out);
         outputNames.push_back(outputFileName); outputTypes["summary"].push_back(outputFileName);
         out.setf(ios::fixed, ios::floatfield); out.setf(ios::showpoint);
-        out << "metric\tlabel\tScore\tpValue\n";
+        out << "metric\tlabel\tScore\tzScore\tstandardDeviation\n";
 
         //as long as you are not at the end of the file or done wih the lines you want
         while((lookup[0] != NULL) && ((allLines == 1) || (userLabels.size() != 0))) {
@@ -272,6 +272,12 @@
 int CooccurrenceCommand::getCooccurrence(vector<SharedRAbundVector*>& thisLookUp, ofstream& out){
     try {
         int numOTUS = thisLookUp[0]->getNumBins();
+
+        if(numOTUS < 2) {
+            m->mothurOut("Not enough OTUs for co-occurrence analysis, skipping"); m->mothurOutEndLine();
+            return 0;
+        }
+
         vector< vector<int> > co_matrix; co_matrix.resize(thisLookUp[0]->getNumBins());
         for (int i = 0; i < thisLookUp[0]->getNumBins(); i++) { co_matrix[i].resize((thisLookUp.size()), 0); }
         vector<int> columntotal; columntotal.resize(thisLookUp.size(), 0);
@@ -494,6 +500,7 @@ int CooccurrenceCommand::getCooccurrence(vector<SharedRAbundVector*>& thisLookUp
             //swap_checkerboards takes the original matrix and swaps checkerboards
             else if(matrix == "sim9") {
                 trial.swap_checkerboards (co_matrix, ncols, nrows);
+                nullmatrix = co_matrix;
             }
             else {
                 m->mothurOut("[ERROR]: No null model selected!\n\n"); m->control_pressed = true;
@@ -529,12 +536,20 @@ int CooccurrenceCommand::getCooccurrence(vector<SharedRAbundVector*>& thisLookUp
 
         m->mothurOutEndLine(); m->mothurOut("average metric score: " + toString(nullMean)); m->mothurOutEndLine();
 
+        //calc_p_value is not a statistical p-value, it's just the average that are either > or < the initscore.
+        //All it does is show what is expected in a competitively structured community
+        //zscore is output so p-value can be looked up in a ztable
         double pvalue = 0.0;
         if (metric == "cscore" || metric == "checker") { pvalue = trial.calc_pvalue_greaterthan (stats, initscore); }
         else{ pvalue = trial.calc_pvalue_lessthan (stats, initscore); }
+
+        double sd = trial.getSD(runs, stats, nullMean);
+
+        double zscore = trial.get_zscore(sd, nullMean, initscore);
 
-        m->mothurOut("pvalue: " + toString(pvalue)); m->mothurOutEndLine();
-        out << metric << '\t' << thisLookUp[0]->getLabel() << '\t' << nullMean << '\t' << pvalue << endl;
+        m->mothurOut("zscore: " + toString(zscore)); m->mothurOutEndLine();
+        m->mothurOut("standard deviation: " + toString(sd)); m->mothurOutEndLine();
+        out << metric << '\t' << thisLookUp[0]->getLabel() << '\t' << nullMean << '\t' << zscore << '\t' << sd << endl;
 
         return 0;
     }
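
Note on the new output columns: the diff replaces the pseudo p-value with a z-score and standard deviation derived from the null-model runs, so that a p-value can be looked up in a standard normal table. Below is a minimal, standalone C++ sketch of that calculation; it is not mothur code, and it assumes getSD/get_zscore use the ordinary population mean/standard-deviation formulas. The helper names (nullMean, nullSD, zScore) and the sample numbers are illustrative only.

// Standalone sketch (assumption, not the mothur implementation):
// derive a z-score for the observed metric score from the scores
// produced by the null-model runs.
#include <cmath>
#include <cstdio>
#include <vector>

// Mean of the null-model scores.
double nullMean(const std::vector<double>& stats) {
    double sum = 0.0;
    for (double s : stats) sum += s;
    return stats.empty() ? 0.0 : sum / stats.size();
}

// Standard deviation of the null-model scores about their mean.
double nullSD(const std::vector<double>& stats, double mean) {
    double ss = 0.0;
    for (double s : stats) ss += (s - mean) * (s - mean);
    return stats.empty() ? 0.0 : std::sqrt(ss / stats.size());
}

// z-score of the observed (initial) score against the null distribution;
// a p-value can then be read from a z-table.
double zScore(double initscore, double mean, double sd) {
    return (sd == 0.0) ? 0.0 : (initscore - mean) / sd;
}

int main() {
    std::vector<double> stats = {10.2, 9.8, 10.5, 9.9, 10.1};  // scores from null-model runs (illustrative)
    double initscore = 12.0;                                   // observed metric score (illustrative)
    double mean = nullMean(stats);
    double sd   = nullSD(stats, mean);
    std::printf("mean=%f sd=%f z=%f\n", mean, sd, zScore(initscore, mean, sd));
    return 0;
}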