Commit 02c6119

Merge branch 'master' of /local/private/glausp/gits/bitseq
Peter Glaus committed Nov 14, 2013
2 parents e9b559b + 4c17589 commit 02c6119
Showing 5 changed files with 55 additions and 19 deletions.
9 changes: 8 additions & 1 deletion MyTimer.cpp
@@ -9,7 +9,7 @@ void MyTimer::adjust(double &time,char f){//{{{
 if(f=='h')time/=3600.0;
 }//}}}
 void MyTimer::write(double time,char f){//{{{
-if(!quiet)messageF("[time: +%lf %c]\n",time,f);
+if(!quiet)messageF("[time: +%.2lf %c]\n",time,f);
 }//}}}
 MyTimer::MyTimer(){//{{{
 N=1;
@@ -33,6 +33,13 @@ double MyTimer::split(long timer, char f){//{{{
 times[timer]=time(NULL);
 return ret;
 }//}}}
+double MyTimer::getTime(long timer, char f){//{{{
+if(timer>=N)return 0;
+double ret;
+ret=time(NULL)-times[timer];
+adjust(ret,f);
+return ret;
+}//}}}
 double MyTimer::current(long timer, char f){//{{{
 if(timer>=N)return 0;
 double ret;
1 change: 1 addition & 0 deletions MyTimer.h
@@ -20,6 +20,7 @@ class MyTimer{
 void setVerbose(){quiet=false;}
 void start(long timer=0);
 double split(long timer=0, char f='s');
+double getTime(long timer=0, char f='s');
 double current(long timer=0, char f='s');
 double stop(long timer=0, char f='s');
 };
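
The new MyTimer::getTime reports how long a timer has been running without restarting it; split (shown above) returns the elapsed time and then resets the timer. A minimal usage sketch, assuming only the MyTimer interface visible in this diff (this example is not part of the commit):

#include <unistd.h>
#include "MyTimer.h"

int main(){
   MyTimer timer;
   timer.start();                   // start timer 0
   sleep(2);
   double soFar = timer.getTime();  // about 2 s; timer 0 keeps running
   sleep(1);
   double total = timer.getTime();  // about 3 s; still measured from start()
   double lap   = timer.split();    // about 3 s, but split() also restarts timer 0
   return (soFar <= total && lap >= total) ? 0 : 1;
}
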
4 changes: 3 additions & 1 deletion VariationalBayes.cpp
@@ -129,6 +129,7 @@ void VariationalBayes::optimize(bool verbose,OPT_TYPE method,long maxIter,double
 double boundOld,bound,squareNorm,squareNormOld=1,valBeta=0,valBetaDiv,natGrad_i,gradGamma_i,phiGradPhiSum_r;
 double *gradPhi,*natGrad,*gradGamma,*searchDir,*tmpD,*phiOld;
 gradPhi=natGrad=gradGamma=searchDir=tmpD=phiOld=NULL;
+MyTimer timer;
 // allocate stuff {{{
 //SimpleSparse *phiGradPhi=new SimpleSparse(beta);
 gradPhi = new double[T];
@@ -151,6 +152,7 @@ void VariationalBayes::optimize(bool verbose,OPT_TYPE method,long maxIter,double
 #endif
 #endif
 boundOld=getBound();
+timer.start();
 while(true){
 negGradient(gradPhi);
 // "yuck"
@@ -248,7 +250,7 @@ void VariationalBayes::optimize(bool verbose,OPT_TYPE method,long maxIter,double
 #ifdef SHOW_FIXED
 messageF("iter(%c): %5.ld bound: %.3lf grad: %.7lf beta: %.7lf fixed: %ld\n",(usedSteepest?'s':'o'),iteration,bound,squareNorm,valBeta,phi->countAboveDelta(0.999));
 #else
-messageF("iter(%c): %5.ld bound: %.3lf grad: %.7lf beta: %.7lf\n",(usedSteepest?'s':'o'),iteration,bound,squareNorm,valBeta);
+messageF("iter(%c)[%5.lds]: %5.ld bound: %.3lf grad: %.7lf beta: %.7lf\n",(usedSteepest?'s':'o'),(long)timer.getTime(),iteration,bound,squareNorm,valBeta);
 #endif
 }else if(!quiet){
 messageF("\riter(%c): %5.ld bound: %.3lf grad: %.7lf beta: %.7lf ",(usedSteepest?'s':'o'),iteration,bound,squareNorm,valBeta);
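
The additions above start a MyTimer when the variational optimization loop begins, so that (when SHOW_FIXED is not defined) each verbose progress line is prefixed with the whole-second runtime so far. A standalone sketch of the same pattern, not the actual VariationalBayes::optimize code; plain time(NULL) stands in for MyTimer and the bound update is a placeholder:

#include <cstdio>
#include <ctime>

int main(){
   time_t start = time(NULL);                    // what MyTimer::start() records
   double bound = 0.0;
   for(long iteration = 1; iteration <= 5; iteration++){
      bound += 1.0/iteration;                    // placeholder for the real bound update
      long elapsed = (long)(time(NULL) - start); // what (long)timer.getTime() yields in seconds
      printf("iter(o)[%5lds]: %5ld bound: %.3lf\n", elapsed, iteration, bound);
   }
   return 0;
}
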
3 changes: 3 additions & 0 deletions changeList
@@ -2,6 +2,9 @@
 Improvements:
 - parseAlignment adding option to mateNamesDiffer [!! add note that this can't be used with mixed alignments !!]
 
+Bug fixes:
+- estimateExpression didn't use more cores when number of chains was changed in parameters file.
+
 0.7.4
 [0.7.3] (bug in copying alignment)
 [0.7.2]
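
The bug fix noted above corresponds to the estimateExpression.cpp change further down: omp_set_num_threads used to be called before the parameters file was read, from the MCMC_chainsN command-line value, so a larger chain count given in the parameters file never increased the thread count. The call is now made after the gibbsParameters are loaded and uses gPar.chainsN() (unless procN is given explicitly). A simplified sketch of the corrected ordering, guarded here by the standard _OPENMP macro rather than the project's SUPPORT_OPENMP; the gibbsParameters stub and readParametersFile helper are placeholders, not the real BitSeq classes:

#ifdef _OPENMP
#include <omp.h>
#endif

// Placeholder stand-in for BitSeq's gibbsParameters.
struct gibbsParameters {
   long chains;
   long chainsN() const { return chains; }
};

// Hypothetical helper representing "parse the parameters file".
gibbsParameters readParametersFile(){
   gibbsParameters g;
   g.chains = 8;          // e.g. chainsN raised in the parameters file
   return g;
}

int main(){
   // Fix: read the parameters first, then size the OpenMP thread pool
   // from the chain count they actually specify.
   gibbsParameters gPar = readParametersFile();
#ifdef _OPENMP
   omp_set_num_threads((int)gPar.chainsN());
#endif
   return 0;
}
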
57 changes: 40 additions & 17 deletions estimateExpression.cpp
@@ -24,6 +24,7 @@
 #define SS second
 
 //#define LOG_NEED
+//#define LOG_RHAT
 TranscriptInfo trInfo;
 
 long M;//, mAll; // M : number of transcripts (include transcript 0 ~ Noise)
@@ -155,6 +156,19 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 vector<pairD> betwVar(M),withVar(M),s2j(M),totAverage(M),av,var;
 vector<pair<pairD,long> > rHat2(M);
 // }}}
+// Names: {{{
+stringstream sstr;
+#ifdef LOG_RHAT
+sstr.str("");
+sstr<<args.getS("outFilePrefix")<<".rhatLog";
+string rhatLogFile = sstr.str();
+#endif
+#ifdef LOG_NEED
+sstr.str("");
+sstr<<args.getS("outFilePrefix")<<".effLog";
+string effLogFile = sstr.str();
+#endif
+// }}}
 // Init: {{{
 DEBUG(message("Initialization:\n"));
 samplesN=gPar.samplesN();
@@ -170,7 +184,8 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 samplers[j] = new GibbsSampler;
 }
 
-timer.start();;
+timer.start();
+timer.start(1);
 if(args.isSet("seed"))seed=args.getL("seed");
 else seed = time(NULL);
 if(args.verbose)message("seed: %ld\n",seed);
@@ -208,6 +223,7 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 }
 }
 #endif
+totalSamples = gPar.burnIn();
 message("Burn in: %ld DONE. ",gPar.burnIn());
 DEBUG(message(" reseting samplers after BurnIn\n"));
 for(i=0;i<chainsN;i++){
@@ -244,7 +260,7 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 }
 }
 #endif
-totalSamples+=samplesN*chainsN;
+totalSamples += samplesN;
 message("\nSampling DONE. ");
 timer.split(0,'m');
 //}}}
@@ -320,13 +336,23 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 if(args.flag("gibbs"))message(" Mean thetaAct (noise parameter)\n %lf\n",totAverage[0].FF);
 messageF("\n");
 //}}}
+// Log rHat if necessary. {{{
+#ifdef LOG_RHAT
+ofstream rhatLog(rhatLogFile.c_str(), ofstream::app);
+rhatLog<<totalSamples<<" "<<(long)timer.getTime(1);
+for(i=1;i<M;i++){
+rhatLog<<" "<<sqrt(rHat2[i].FF.FF);
+}
+rhatLog<<endl;
+rhatLog.close();
+#endif
+// }}}
 // Increase sample size and start over: {{{
 if(quitNext){// Sampling iterations end {{{
 if(sqrt(rHat2[0].FF.FF) > gPar.targetScaleReduction()){
 message("WARNING: Following transcripts failed to converge entirely\n (however the estimates might still be usable):\n");
 long countUncoverged=0;
-stringstream sstr;
-sstr.str();
+sstr.str("");
 sstr<<"# unconverged_transcripts: ";
 for(i=0;(i<M) && (sqrt(rHat2[i].FF.FF) > gPar.targetScaleReduction());i++){
 sstr<<rHat2[i].SS<<" ("<<sqrt(rHat2[i].FF.FF)<<") ";
@@ -358,9 +384,7 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 }
 // log the number of effective samples, only when testing... //{{{
 #ifdef LOG_NEED
-stringstream sstr;
-sstr<<args.getS("outFilePrefix")<<".effLog";
-ofstream effLog(sstr.str().c_str());
+ofstream effLog(effLogFile.c_str());
 for(i=1;i<M;i++){
 effLog<<needS[rHat2[i].SS]<<" "<<sqrt(rHat2[i].FF.FF)<<" "<<samplesHave*betwVar[rHat2[i].SS].FF<<" "<<withVar[rHat2[i].SS].FF<<" "<<rHat2[i].SS<<endl;
 }
@@ -378,7 +402,7 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 // Prepare for producing samples if Rhat^2<target scale reduction
 // OR reached samplesNmax
 // OR produced too many samples (>500 000)
-if((totalSamples < 5000000) && (rMean.FF > gPar.targetScaleReduction())){
+if((totalSamples*chainsN < 5000000) && (rMean.FF > gPar.targetScaleReduction())){
 samplesN *= 2;
 }else{
 quitNext = true;
@@ -395,7 +419,6 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 if(samplesN<samplesSave){
 samplesSave = samplesN;
 }
-stringstream sstr;
 for(j=0;j<chainsN;j++){
 sstr.str("");
 sstr<<args.getS("outFilePrefix")<<"."<<args.getS("outputType")<<"S-"<<j;
@@ -477,7 +500,7 @@ void MCMC(TagAlignments *alignments,gibbsParameters &gPar,ArgumentParser &args){
 }
 // delete [] samplers;
 //}}}
-message("Total samples: %ld\n",totalSamples);
+message("Total samples: %ld\n",totalSamples*chainsN);
 }//}}}
 
 extern "C" int estimateExpression(int *argc, char* argv[]) {//{{{
@@ -509,13 +532,6 @@ string programDescription =
 if(args.verbose)buildTime(argv[0],__DATE__,__TIME__);
 // }}}
 MyTimer timer;
-#ifdef SUPPORT_OPENMP
-if(args.isSet("procN"))
-omp_set_num_threads(args.getL("procN"));
-else
-omp_set_num_threads(args.getL("MCMC_chainsN"));
-#endif
-
 gibbsParameters gPar;
 TagAlignments *alignments=NULL;
 //{{{ Initialization:
@@ -526,6 +542,13 @@ string programDescription =
 }
 args.updateS("outputType", ns_expression::getOutputType(args));
 if(args.verbose)gPar.getAllParameters();
+#ifdef SUPPORT_OPENMP
+if(args.isSet("procN"))
+omp_set_num_threads(args.getL("procN"));
+else
+omp_set_num_threads(gPar.chainsN());
+#endif
+
 
 //}}}
 // {{{ Read transcriptInfo and .prob file
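
For reference, the LOG_RHAT block added above appends one line per convergence check to <outFilePrefix>.rhatLog: the number of samples drawn per chain so far (including burn-in), the elapsed seconds of timer 1 (started before burn-in), and sqrt(Rhat^2) for every transcript. A minimal sketch of that append-mode logging pattern; the file name and values below are made up, and the real code iterates over the rHat2 vector computed in MCMC:

#include <cmath>
#include <fstream>
#include <string>
#include <vector>

int main(){
   std::string rhatLogFile = "example.rhatLog";   // real code builds "<outFilePrefix>.rhatLog"
   long totalSamples = 1000;                      // samples per chain so far (made-up value)
   long elapsedSec = 42;                          // seconds since sampling started (made-up value)
   std::vector<double> rHat2;                     // squared scale-reduction estimates (made up)
   rHat2.push_back(1.44); rHat2.push_back(1.09); rHat2.push_back(1.00);

   // Append one line per convergence check, like the LOG_RHAT block above.
   std::ofstream rhatLog(rhatLogFile.c_str(), std::ofstream::app);
   rhatLog << totalSamples << " " << elapsedSec;
   for(size_t i = 0; i < rHat2.size(); i++)
      rhatLog << " " << std::sqrt(rHat2[i]);
   rhatLog << std::endl;
   rhatLog.close();
   return 0;
}
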
