Adding Tab values for QpPb analysis
diff --git a/PWGHF/vertexingHF/macros/AddTaskCFVertexingHF.C b/PWGHF/vertexingHF/macros/AddTaskCFVertexingHF.C
old mode 100644
new mode 100755
index 7042f67..8b81631
--- a/PWGHF/vertexingHF/macros/AddTaskCFVertexingHF.C
+++ b/PWGHF/vertexingHF/macros/AddTaskCFVertexingHF.C
@@ -27,8 +27,10 @@ const Int_t    minITSClusters = 5;
 
 const Float_t centmin_0_10 = 0.;
 const Float_t centmax_0_10 = 10.;
-const Float_t centmin_10_100 = 10.;
-const Float_t centmax_10_100 = 100.;
+const Float_t centmin_10_60 = 10.;
+const Float_t centmax_10_60 = 60.;
+const Float_t centmin_60_100 = 60.;
+const Float_t centmax_60_100 = 100.;
 const Float_t centmax = 100.;
 const Float_t fakemin = -0.5;
 const Float_t fakemax = 2.5;
@@ -40,12 +42,28 @@ const Float_t multmin_0_20 = 0;
 const Float_t multmax_0_20 = 20;
 const Float_t multmin_20_50 = 20;
 const Float_t multmax_20_50 = 50;
-const Float_t multmin_50_102 = 50;
-const Float_t multmax_50_102 = 102;
-
+const Float_t multmin_50_80 = 50;
+const Float_t multmax_50_80 = 80;
+const Float_t multmin_80_100 = 80;
+const Float_t multmax_80_100 = 100;
+const Float_t multmin_100_400 = 100; // Only for pPb
+const Float_t multmax_100_400 = 400; // Only for pPb
+
+//
+// useWeight : flag for pT weights (default: pp 2010 weights; dedicated functions exist for each MC production)
+// useFlatPtWeight : flag to test flat pT weights (computed for the LHC10f7a MC)
+// useZWeight : flag to use the z-vtx weight (currently used only for systematics)
+// useNchWeight : flag to use weights on the distribution of simulated primary particles (default: pp 2010)
+// useNtrkWeight : flag to use weights on the Ntracklets distribution
+// isFinePtBin : flag for fine pT binning (100 MeV bins from 0 to 30 GeV)
+// multiplicityEstimator : choice of the multiplicity (rather than centrality) estimator
+// isPPData : flag to switch off centrality checks when running on pp data (greatly reduces the log-file size)
+// isPPbData : flag for pPb data; changes the Ntrk binning
+// estimatorFilename, refMult : name of the file with the Ntrk-vs-z-vtx multiplicity correction profiles, and the reference (average) multiplicity
+// isFineNtrkBin : gives Ntrk bins of 1 unit from 0 to 100 (0 to 200 for pPb)
+// (a usage sketch follows the dashed separator below)
 //----------------------------------------------------
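For reference, a usage sketch (not part of the macro itself): adding the task from a train steering macro, with the argument order taken from the signature below. The macro path is assumed from this repository layout; the suffix "_pPb" and the estimator file name "estimators.root" are placeholders, and refMult is left at its default value.

  gROOT->LoadMacro("$ALICE_ROOT/PWGHF/vertexingHF/macros/AddTaskCFVertexingHF.C");
  AliCFTaskVertexingHF *cfTask =
    AddTaskCFVertexingHF("./D0toKpiCuts.root", "D0toKpiCutsStandard", "_pPb",
                         AliCFTaskVertexingHF::kCheetah,        // fast configuration
                         kFALSE, kFALSE,                        // isKeepDfromB, isKeepDfromBOnly
                         421, 2,                                // D0 PDG code, both charges
                         kFALSE, kFALSE, kFALSE,                // useWeight, useFlatPtWeight, useZWeight
                         kFALSE, kFALSE,                        // useNchWeight, useNtrkWeight
                         kFALSE,                                // isFinePtBin
                         "estimators.root",                     // estimatorFilename (placeholder)
                         AliCFTaskVertexingHF::kNtrk10,         // multiplicityEstimator
                         kFALSE, kTRUE,                         // isPPData, isPPbData
                         9.26, kTRUE);                          // refMult (default), isFineNtrkBin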
 
-AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.root", TString cutObjectName="D0toKpiCutsStandard", TString suffix="", Int_t configuration = AliCFTaskVertexingHF::kSnail, Bool_t isKeepDfromB=kFALSE, Bool_t isKeepDfromBOnly=kFALSE, Int_t pdgCode = 421, Char_t isSign = 2, Bool_t useWeight=kFALSE, Bool_t useFlatPtWeight=kFALSE, Bool_t useZWeight=kFALSE)
+AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.root", TString cutObjectName="D0toKpiCutsStandard", TString suffix="", Int_t configuration = AliCFTaskVertexingHF::kCheetah, Bool_t isKeepDfromB=kFALSE, Bool_t isKeepDfromBOnly=kFALSE, Int_t pdgCode = 421, Char_t isSign = 2, Bool_t useWeight=kFALSE, Bool_t useFlatPtWeight=kFALSE, Bool_t useZWeight=kFALSE, Bool_t useNchWeight=kFALSE, Bool_t useNtrkWeight=kFALSE, Bool_t isFinePtBin=kFALSE, TString estimatorFilename="", Int_t multiplicityEstimator = AliCFTaskVertexingHF::kNtrk10, Bool_t isPPData=kFALSE, Bool_t isPPbData=kFALSE, Double_t refMult = 9.26, Bool_t isFineNtrkBin=kFALSE)
 {
        printf("Adding CF task using cuts from file %s\n",cutFile);
        if (configuration == AliCFTaskVertexingHF::kSnail){
@@ -109,6 +127,7 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        ptmin_16_24 =  16.0 ;
        ptmax_16_24 =  24.0 ;
 
+
        //CONTAINER DEFINITION
        Info("AliCFTaskVertexingHF","SETUP CONTAINER");
        const Double_t phimax = 2*TMath::Pi();
@@ -123,16 +142,20 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        const Int_t nbinpointing  = 50 ; //bins in cosPointingAngle
        const Int_t nbinphi  = 18 ; //bins in Phi
        const Int_t nbinzvtx  = 30 ; //bins in z vertex
-       const Int_t nbincent = 11;  //bins in centrality
-       const Int_t nbincent_0_10 = 2;  //bins in centrality between 0 and 10
-       const Int_t nbincent_10_100 = 9;  //bins in centrality between 10 and 100
+       const Int_t nbincent = 28;  //bins in centrality
+       const Int_t nbincent_0_10 = 4;  //bins in centrality between 0 and 10
+       const Int_t nbincent_10_60 = 20;  //bins in centrality between 10 and 60
+       const Int_t nbincent_60_100 = 4;  //bins in centrality between 60 and 100
        const Int_t nbinfake = 3;  //bins in fake
        const Int_t nbinpointingXY = 50;  //bins in cosPointingAngleXY
        const Int_t nbinnormDecayLXY = 20;  //bins in NormDecayLengthXY
-       const Int_t nbinmult = 48;  //bins in multiplicity (total number)
+       Int_t nbinmult = 49;  //bins in multiplicity (total number = 20+15+10+4; not const, since 6 pPb bins may be added below)
        const Int_t nbinmult_0_20 = 20; //bins in multiplicity between 0 and 20
        const Int_t nbinmult_20_50 = 15; //bins in multiplicity between 20 and 50
-       const Int_t nbinmult_50_102 = 13; //bins in multiplicity between 50 and 102
+       const Int_t nbinmult_50_80 = 10; //bins in multiplicity between 50 and 80
+       const Int_t nbinmult_80_100 = 4; //bins in multiplicity between 80 and 100
+       const Int_t nbinmult_100_400 = 6; //bins in multiplicity between 100 and 400 (pPb only)
+       if(isPPbData) nbinmult += nbinmult_100_400;
 
        //the sensitive variables, their indices
 
@@ -248,7 +271,31 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        for(Int_t i=0; i<=nbinpt; i++) printf("binLimpT[%d]=%f\n",i,binLimpT[i]);  
        
        printf("pT: nbin (from cuts file) = %d\n",nbinpt);
-       
+
+       Double_t *binLimpTFine=new Double_t[300+1];
+       if(isFinePtBin) {
+         iBin[ipT]=300;
+         for (Int_t ibin0 = 0 ; ibin0<300+1; ibin0++){
+           binLimpTFine[ibin0] = 0.1*ibin0;
+         }
+         printf("pT: nbins fine = 300\n");
+       }
+
+       // Fine Ntrk bining setting
+       Double_t *binLimmultFine;
+       Int_t nbinmultTmp=nbinmult;
+       if(isFineNtrkBin){
+         Int_t nbinLimmultFine=100;
+         if(isPPbData) nbinLimmultFine = 200;
+         const UInt_t nbinMultFine = nbinLimmultFine;
+         binLimmultFine = new Double_t[nbinMultFine+1];
+         for (Int_t ibin0 = 0 ; ibin0<nbinMultFine+1; ibin0++){
+           binLimmultFine[ibin0] = ibin0;
+         }
+         nbinmultTmp=nbinLimmultFine;
+       }
+       const Int_t nbinmultTot=nbinmultTmp;
+
        // defining now the binning for the other variables:
        
        iBin[iy]=nbiny;
@@ -263,7 +310,7 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        iBin[ifake]=nbinfake;
        iBin[ipointingXY]=nbinpointingXY;
        iBin[inormDecayLXY]=nbinnormDecayLXY;
-       iBin[imult]=nbinmult;
+       iBin[imult]=nbinmultTot;
        
        //arrays for lower bounds :
        Double_t *binLimy=new Double_t[iBin[iy]+1];
@@ -280,6 +327,7 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        Double_t *binLimnormDecayLXY=new Double_t[iBin[inormDecayLXY]+1];
        Double_t *binLimmult=new Double_t[iBin[imult]+1];
 
+
        // y
        for(Int_t i=0; i<=nbiny; i++) binLimy[i]=(Double_t)ymin  + (ymax-ymin)  /nbiny*(Double_t)i ;
 
@@ -308,10 +356,14 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
 
        // centrality
        for(Int_t i=0; i<=nbincent_0_10; i++) binLimcent[i]=(Double_t)centmin_0_10 + (centmax_0_10-centmin_0_10)/nbincent_0_10*(Double_t)i ; 
-       if (binLimcent[nbincent_0_10] != centmin_10_100)  {
+       if (binLimcent[nbincent_0_10] != centmin_10_60)  {
                Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for cent - 1st range - differs from expected!\n");
        }
-       for(Int_t i=0; i<=nbincent_10_100; i++) binLimcent[i+nbincent_0_10]=(Double_t)centmin_10_100 + (centmax_10_100-centmin_10_100)/nbincent_10_100*(Double_t)i ; 
+       for(Int_t i=0; i<=nbincent_10_60; i++) binLimcent[i+nbincent_0_10]=(Double_t)centmin_10_60 + (centmax_10_60-centmin_10_60)/nbincent_10_60*(Double_t)i ;
+       if (binLimcent[nbincent_0_10+nbincent_10_60] != centmin_60_100)  {
+               Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for cent - 2st range - differs from expected!\n");
+       }
+       for(Int_t i=0; i<=nbincent_60_100; i++) binLimcent[i+nbincent_0_10+nbincent_10_60]=(Double_t)centmin_60_100 + (centmax_60_100-centmin_60_100)/nbincent_60_100*(Double_t)i ;
 
        // fake
        for(Int_t i=0; i<=nbinfake; i++) {
@@ -330,23 +382,40 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
                Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for mult - 1st range - differs from expected!\n");
        }
        for(Int_t i=0; i<=nbinmult_20_50; i++) binLimmult[i+nbinmult_0_20]=(Double_t)multmin_20_50 + (multmax_20_50-multmin_20_50)/nbinmult_20_50*(Double_t)i ; 
-       if (binLimmult[nbinmult_0_20+nbinmult_20_50] != multmin_50_102)  {
+       if (binLimmult[nbinmult_0_20+nbinmult_20_50] != multmin_50_80)  {
+               Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for mult - 2nd range - differs from expected!\n");
+       }
+       for(Int_t i=0; i<=nbinmult_50_80; i++) binLimmult[i+nbinmult_0_20+nbinmult_20_50]=(Double_t)multmin_50_80 + (multmax_50_80-multmin_50_80)/nbinmult_50_80*(Double_t)i ; 
+       if (binLimmult[nbinmult_0_20+nbinmult_20_50+nbinmult_50_80] != multmin_80_100)  {
+               Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for mult - 2nd range - differs from expected!\n");
+       }
+       for(Int_t i=0; i<=nbinmult_80_100; i++) binLimmult[i+nbinmult_0_20+nbinmult_20_50+nbinmult_50_80]=(Double_t)multmin_80_100 + (multmax_80_100-multmin_80_100)/nbinmult_80_100*(Double_t)i ; 
+       if (binLimmult[nbinmult_0_20+nbinmult_20_50+nbinmult_50_80+nbinmult_80_100] != multmin_100_400)  {
                Error("AliCFHeavyFlavourTaskMultiVarMultiStep","Calculated bin lim for mult - 2nd range - differs from expected!\n");
        }
-       for(Int_t i=0; i<=nbinmult_50_102; i++) binLimmult[i+nbinmult_0_20+nbinmult_20_50]=(Double_t)multmin_50_102 + (multmax_50_102-multmin_50_102)/nbinmult_50_102*(Double_t)i ; 
+
+       if(isPPbData){
+         for(Int_t i=0; i<=nbinmult_100_400; i++) binLimmult[i+nbinmult_0_20+nbinmult_20_50+nbinmult_50_80+nbinmult_80_100]=(Double_t)multmin_100_400 + (multmax_100_400-multmin_100_400)/nbinmult_100_400*(Double_t)i ; 
+       }
+
+       if(multiplicityEstimator==AliCFTaskVertexingHF::kVZERO) {
+         Int_t items = nbinmult_0_20+nbinmult_20_50+nbinmult_50_80+nbinmult_80_100;
+         if(isPPbData) items = nbinmult_0_20+nbinmult_20_50+nbinmult_50_80+nbinmult_80_100+nbinmult_100_400;
+         for(Int_t i=0; i<=items; i++) binLimmult[i]*= 68./12.;
+       }
 
        //one "container" for MC
        TString nameContainer="";
        if(!isKeepDfromB) {
-               nameContainer="CFHFccontainer0_CommonFramework";
+               nameContainer="CFHFccontainer0";
        }
        else  if(isKeepDfromBOnly){
-               nameContainer="CFHFccontainer0DfromB_CommonFramework";
+               nameContainer="CFHFccontainer0DfromB";
        }
        else  {
-               nameContainer="CFHFccontainer0allD_CommonFramework";      
+               nameContainer="CFHFccontainer0allD";      
        }
-
+       nameContainer += suffix;
        //Setting up the container grid... 
 
        AliCFContainer* container;
@@ -355,7 +424,8 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
                container = new AliCFContainer(nameContainer,"container for tracks",nstep,nvarTot,iBin);
                //setting the bin limits
                printf("pt\n");
-               container -> SetBinLimits(ipT,binLimpT);
+               if(isFinePtBin) container -> SetBinLimits(ipT,binLimpTFine); 
+               else            container -> SetBinLimits(ipT,binLimpT);
                printf("y\n");
                container -> SetBinLimits(iy,binLimy);
                printf("cts\n");
@@ -385,7 +455,8 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
                printf("normDecayLXY\n");
                container -> SetBinLimits(inormDecayLXY,binLimnormDecayLXY);
                printf("multiplicity\n");
-               container -> SetBinLimits(imult,binLimmult);
+               if(isFineNtrkBin) container -> SetBinLimits(imult,binLimmultFine);
+               else              container -> SetBinLimits(imult,binLimmult);
 
                container -> SetVarTitle(ipT,"pt");
                container -> SetVarTitle(iy,"y");
@@ -429,7 +500,8 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
 
                container = new AliCFContainer(nameContainer,"container for tracks",nstep,nvar,iBinFast);
                printf("pt\n");
-               container -> SetBinLimits(ipTFast,binLimpT);
+               if(isFinePtBin) container -> SetBinLimits(ipTFast,binLimpTFine); 
+               else            container -> SetBinLimits(ipTFast,binLimpT);
                printf("y\n");
                container -> SetBinLimits(iyFast,binLimy);
                printf("ct\n");
@@ -443,7 +515,8 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
                printf("fake\n");
                container -> SetBinLimits(ifakeFast,binLimfake);
                printf("multiplicity\n");
-               container -> SetBinLimits(imultFast,binLimmult);
+               if(isFineNtrkBin) container -> SetBinLimits(imultFast,binLimmultFine);
+               else              container -> SetBinLimits(imultFast,binLimmult);
 
                container -> SetVarTitle(ipTFast,"pt");
                container -> SetVarTitle(iyFast,"y");
@@ -543,43 +616,111 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        task->SetFillFromGenerated(kFALSE);
        task->SetCFManager(man); //here is set the CF manager
        task->SetDecayChannel(2);
+       task->SetUseFlatPtWeight(useFlatPtWeight);
        task->SetUseWeight(useWeight);
-       task->SetUseFlatPtWeight(useFlatPtWeight); 
        task->SetUseZWeight(useZWeight);
        task->SetSign(isSign);
        task->SetCentralitySelection(kFALSE);
        task->SetFakeSelection(0);
        task->SetRejectCandidateIfNotFromQuark(kTRUE); // put to false if you want to keep HIJING D0!!
        task->SetUseMCVertex(kFALSE); // put to true if you want to do studies on pp
+       task->SetMultiplicityEstimator(multiplicityEstimator);
+       task->SetIsPPData(isPPData);
 
        if (isKeepDfromB && !isKeepDfromBOnly) task->SetDselection(2);
        if (isKeepDfromB && isKeepDfromBOnly) task->SetDselection(1);   
 
        TF1* funcWeight = 0x0;
        if (task->GetUseWeight()) {
-               funcWeight = (TF1*)fileCuts->Get("funcWeight");
-               if (funcWeight == 0x0){
-                       Printf("FONLL Weights will be used");
-               }
-               else {
-                       task->SetWeightFunction(funcWeight);
-                       Printf("User-defined Weights will be used. The function being:");
-                       task->GetWeightFunction(funcWeight)->Print();
-               }
+         funcWeight = (TF1*)fileCuts->Get("funcWeight");
+         if (funcWeight == 0x0){
+           Printf("FONLL Weights will be used");
+         }
+         else {
+           task->SetWeightFunction(funcWeight);
+           Printf("User-defined Weights will be used.");
+         }
        }
 
+       if(useNchWeight || useNtrkWeight){
+         TH1F *hNchPrimaries;
+         TH1F *hNchMeasured;
+         if(isPPbData) hNchPrimaries = (TH1F*)fileCuts->Get("hNtrUnCorrEvWithCandWeight");
+         else hNchPrimaries = (TH1F*)fileCuts->Get("hGenPrimaryParticlesInelGt0");
+         hNchMeasured = (TH1F*)fileCuts->Get("hNchMeasured");
+         if(hNchPrimaries) {
+           task->SetUseNchWeight(kTRUE);
+           task->SetMCNchHisto(hNchPrimaries);
+           if(isPPbData) task->SetUseNchTrackletsWeight();
+         } else {
+           AliFatal("Histogram for multiplicity weights not found");
+           return 0x0;
+         }
+         if(hNchMeasured) task->SetMeasuredNchHisto(hNchMeasured);
+         if(useNtrkWeight) task->SetUseNchTrackletsWeight();
+       }
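When useNchWeight or useNtrkWeight is requested, the cuts file must also provide the multiplicity histograms read above ("hGenPrimaryParticlesInelGt0" for pp, "hNtrUnCorrEvWithCandWeight" for pPb, plus the optional "hNchMeasured"). A quick standalone check, with the file name as an assumption:

  TFile* fCuts = TFile::Open("D0toKpiCuts.root");
  TH1F* hGenPrim = (TH1F*)fCuts->Get("hGenPrimaryParticlesInelGt0");  // "hNtrUnCorrEvWithCandWeight" for pPb
  TH1F* hNchMeas = (TH1F*)fCuts->Get("hNchMeasured");                 // optional
  if(!hGenPrim) Printf("Multiplicity-weight histogram missing: the task would stop with AliFatal");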
+       
+       if(isPPbData) { 
+         task->SetIsPPbData(kTRUE); 
+       }
+   
+       if(estimatorFilename.EqualTo("") ) {
+         printf("Estimator file not provided, multiplicity corrected histograms will not be filled\n");
+         task->SetUseZvtxCorrectedNtrkEstimator(kFALSE);
+       } else{
+
+         TFile* fileEstimator=TFile::Open(estimatorFilename.Data());
+         if(!fileEstimator)  {
+           AliFatal("File with multiplicity estimator not found"); 
+           return 0x0;
+         }
+
+         task->SetUseZvtxCorrectedNtrkEstimator(kTRUE);
+         task->SetReferenceMultiplcity(refMult);
+
+         if (isPPbData) {     //Use LHC13 periods for mult correction if pPb data
+            const Char_t* periodNames[2] = {"LHC13b", "LHC13c"};
+            TProfile* multEstimatorAvg[2];
+            for(Int_t ip=0; ip<2; ip++) {
+             TProfile* profEst = (TProfile*)fileEstimator->Get(Form("SPDmult10_%s",periodNames[ip]));
+             if (!profEst) {
+               AliFatal(Form("Multiplicity estimator for %s not found! Please check your estimator file",periodNames[ip]));
+               return 0x0;
+             }
+             multEstimatorAvg[ip] = (TProfile*)(profEst->Clone(Form("SPDmult10_%s_clone",periodNames[ip])));
+            }
+            task->SetMultiplVsZProfileLHC13b(multEstimatorAvg[0]);
+            task->SetMultiplVsZProfileLHC13c(multEstimatorAvg[1]);
+         }
+         else {
+           const Char_t* periodNames[4] = {"LHC10b", "LHC10c", "LHC10d", "LHC10e"};   //else, assume pp (LHC10)
+            TProfile* multEstimatorAvg[4];
+            for(Int_t ip=0; ip<4; ip++) {
+             TProfile* profEst = (TProfile*)fileEstimator->Get(Form("SPDmult10_%s",periodNames[ip]));
+             if (!profEst) {
+               AliFatal(Form("Multiplicity estimator for %s not found! Please check your estimator file",periodNames[ip]));
+               return 0x0;
+             }
+             multEstimatorAvg[ip] = (TProfile*)(profEst->Clone(Form("SPDmult10_%s_clone",periodNames[ip])));
+            }
+            task->SetMultiplVsZProfileLHC10b(multEstimatorAvg[0]);
+            task->SetMultiplVsZProfileLHC10c(multEstimatorAvg[1]);
+            task->SetMultiplVsZProfileLHC10d(multEstimatorAvg[2]);
+            task->SetMultiplVsZProfileLHC10e(multEstimatorAvg[3]);
+         }
+
+       }
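The estimator file is expected to contain one TProfile per period, named "SPDmult10_<period>" (LHC13b/LHC13c for pPb, LHC10b-e for pp), holding the average tracklet multiplicity versus the z-vertex position. A sketch of how such a file could be assembled (the file name, binning and the filling step are assumptions):

  TFile* fEst = new TFile("estimators.root","RECREATE");
  const Char_t* periods[2] = {"LHC13b","LHC13c"};                     // LHC10b..LHC10e for pp
  for(Int_t ip=0; ip<2; ip++){
    TProfile* prof = new TProfile(Form("SPDmult10_%s",periods[ip]),
                                  "<Ntracklets> vs z_{vtx}",30,-15.,15.);  // illustrative binning
    // ... fill with prof->Fill(zVtx,nTracklets) over the events of the period ...
    prof->Write();
  }
  fEst->Close();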
+       
+
        Printf("***************** CONTAINER SETTINGS *****************");       
        Printf("decay channel = %d",(Int_t)task->GetDecayChannel());
        Printf("FillFromGenerated = %d",(Int_t)task->GetFillFromGenerated());
        Printf("Dselection = %d",(Int_t)task->GetDselection());
        Printf("UseWeight = %d",(Int_t)task->GetUseWeight());
        if (task->GetUseWeight()) {
-               Printf("User-defined Weight function:");
-               task->GetWeightFunction(funcWeight)->Print();
-       }
-       else{
-               Printf("FONLL will be used for the weights");
+         if(funcWeight) Printf("User-defined Weight function");
+         else Printf("FONLL will be used for the weights");
        }
+
+       Printf("Use Nch weight = %d",(Int_t)task->GetUseNchWeight());
        Printf("Sign = %d",(Int_t)task->GetSign());
        Printf("Centrality selection = %d",(Int_t)task->GetCentralitySelection());
        Printf("Fake selection = %d",(Int_t)task->GetFakeSelection());
@@ -605,15 +746,16 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
 
        TString nameCorr="";
        if(!isKeepDfromB) {
-               nameCorr="CFHFcorr0_CommonFramework";
+               nameCorr="CFHFcorr0";
        }
        else  if(isKeepDfromBOnly){
-               nameCorr= "CFHFcorr0KeepDfromBOnly_CommonFramework";
+               nameCorr= "CFHFcorr0KeepDfromBOnly";
        }
        else  {
-               nameCorr="CFHFcorr0allD_CommonFramework";
+               nameCorr="CFHFcorr0allD";
 
        }
+       nameCorr += suffix;
 
         THnSparseD* correlation = new THnSparseD(nameCorr,"THnSparse with correlations",4,thnDim);
        Double_t** binEdges = new Double_t*[2];
@@ -621,6 +763,7 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
         // set bin limits
 
         binEdges[0]= binLimpT;
+       if(isFinePtBin) binEdges[0] = binLimpTFine;
         binEdges[1]= binLimy;
 
         correlation->SetBinEdges(0,binEdges[0]);
@@ -644,28 +787,37 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        // ----- output data -----
        
        TString outputfile = AliAnalysisManager::GetCommonFileName();
-       TString output1name="", output2name="", output3name="",output4name="";
+       TString output1name="", output2name="", output3name="",output4name="", output5name="";
        output2name=nameContainer;
        output3name=nameCorr;
+       output4name= "Cuts";
+       output5name= "coutProf";
        if(!isKeepDfromB) {
-               outputfile += ":PWG3_D2H_CFtaskD0toKpi_CommonFramework";
-               output1name="CFHFchist0_CommonFramework";
+               outputfile += ":PWG3_D2H_CFtaskD0toKpi";
+               output1name="CFHFchist0";
+               output3name+="_cOnly";
+               output4name+="_cOnly";
+               output5name+="_cOnly";
        }
        else  if(isKeepDfromBOnly){
-               outputfile += ":PWG3_D2H_CFtaskD0toKpiKeepDfromBOnly_CommonFramework";
-               output1name="CFHFchist0DfromB_CommonFramework";
+               outputfile += ":PWG3_D2H_CFtaskD0toKpiKeepDfromBOnly";
+               output1name="CFHFchist0DfromB";
+               output3name+="_bOnly";
+               output4name+="_bOnly";
+               output5name+="_bOnly";
        }
        else{
-               outputfile += ":PWG3_D2H_CFtaskD0toKpiKeepDfromB_CommonFramework";
-               output1name="CFHFchist0allD_CommonFramework";
+               outputfile += ":PWG3_D2H_CFtaskD0toKpiKeepDfromB";
+               output1name="CFHFchist0allD";
+               output3name+="_all";
+               output4name+="_all";
+               output5name+="_all";
        }
-       output4name= "Cuts_CommonFramework";
 
        outputfile += suffix;
        output1name += suffix;
-       output2name += suffix;
-       output3name += suffix;
        output4name += suffix;
+       output5name += suffix;
 
        //now comes user's output objects :
        // output TH1I for event counting
@@ -676,6 +828,9 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
         AliAnalysisDataContainer *coutput3 = mgr->CreateContainer(output3name, THnSparseD::Class(),AliAnalysisManager::kOutputContainer,outputfile.Data());
        // cuts
        AliAnalysisDataContainer *coutput4 = mgr->CreateContainer(output4name, AliRDHFCuts::Class(),AliAnalysisManager::kOutputContainer, outputfile.Data());
+       // estimators list
+       AliAnalysisDataContainer *coutput5 = mgr->CreateContainer(output5name, TList::Class(),AliAnalysisManager::kOutputContainer, outputfile.Data());
+
 
        mgr->AddTask(task);
        
@@ -684,6 +839,8 @@ AliCFTaskVertexingHF *AddTaskCFVertexingHF(const char* cutFile = "./D0toKpiCuts.
        mgr->ConnectOutput(task,2,coutput2);
         mgr->ConnectOutput(task,3,coutput3);
        mgr->ConnectOutput(task,4,coutput4);
+       mgr->ConnectOutput(task,5,coutput5);
+
        return task;
        
 }
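Once the train has run, the filled container can be read back from the directory written into the common output file; the directory and object names follow the scheme built above (shown here for the prompt-charm case with an empty suffix, and assuming the usual "AnalysisResults.root" common file name):

  TFile* fOut = TFile::Open("AnalysisResults.root");
  TDirectoryFile* dir = (TDirectoryFile*)fOut->Get("PWG3_D2H_CFtaskD0toKpi");
  AliCFContainer* cont = (AliCFContainer*)dir->Get("CFHFccontainer0");
  THnSparseD* corr = (THnSparseD*)dir->Get("CFHFcorr0_cOnly");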